Instance Selection and Feature Weighting Using Evolutionary Algorithms. Ramirez-Cruz, J., Fuentes, O., Alarcon-Aquino, V., & Garcia-Banuelos, L. In 2006 15th International Conference on Computing, pages 73-79, November 2006. IEEE. doi: 10.1109/CIC.2006.42. URL: http://ieeexplore.ieee.org/document/4023791/

Abstract: Machine learning algorithms are commonly used in real-world applications to solve complex problems for which it is difficult to obtain a mathematical model. The goal of a machine learning algorithm is to learn an objective function from a set of training examples, where each example is defined by a feature set. Typically, real-world applications have many examples with many features; however, the objective function depends on only a few of them. The presence of noisy examples or irrelevant features in a dataset degrades the performance of machine learning algorithms; such is the case of the k-nearest neighbor algorithm (k-NN). Thus, choosing good instance and feature subsets may improve the algorithm's performance. Evolutionary algorithms have proved to be good techniques for finding solutions in large solution spaces and to be stable in the presence of noise. In this work, we address the problem of instance selection and feature weighting for instance-based methods by means of a genetic algorithm (GA) and evolution strategies (ES). We show that combining GA and ES with a k-NN algorithm can improve the predictive accuracy of the resulting classifier.
@inproceedings{ramirezcruz2006instance,
title = {Instance Selection and Feature Weighting Using Evolutionary Algorithms},
year = {2006},
pages = {73-79},
url = {http://ieeexplore.ieee.org/document/4023791/},
month = {11},
publisher = {IEEE},
abstract = {Machine learning algorithms are commonly used in real-world applications to solve complex problems for which it is difficult to obtain a mathematical model. The goal of a machine learning algorithm is to learn an objective function from a set of training examples, where each example is defined by a feature set. Typically, real-world applications have many examples with many features; however, the objective function depends on only a few of them. The presence of noisy examples or irrelevant features in a dataset degrades the performance of machine learning algorithms; such is the case of the k-nearest neighbor algorithm (k-NN). Thus, choosing good instance and feature subsets may improve the algorithm's performance. Evolutionary algorithms have proved to be good techniques for finding solutions in large solution spaces and to be stable in the presence of noise. In this work, we address the problem of instance selection and feature weighting for instance-based methods by means of a genetic algorithm (GA) and evolution strategies (ES). We show that combining GA and ES with a k-NN algorithm can improve the predictive accuracy of the resulting classifier.},
author = {Ramirez-Cruz, Jose-Federico and Fuentes, Olac and Alarcon-Aquino, Vicente and Garcia-Banuelos, Luciano},
doi = {10.1109/CIC.2006.42},
booktitle = {2006 15th International Conference on Computing}
}
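
To make the approach described in the abstract concrete, here is a minimal illustrative sketch (not the authors' implementation) of how instance selection and feature weighting can be evolved jointly for a k-NN classifier: each individual carries a boolean instance-selection mask evolved with GA-style one-point crossover and bit-flip mutation, plus real-valued feature weights perturbed with ES-style Gaussian mutation, and fitness is the accuracy of a weighted k-NN on a held-out validation split. The dataset (Iris), population size, mutation rates, and all other hyperparameters are arbitrary assumptions for illustration, not values from the paper.

# Illustrative sketch only: jointly evolving an instance mask (GA-style) and
# feature weights (ES-style Gaussian perturbation) with k-NN accuracy as fitness.
import numpy as np
from sklearn.datasets import load_iris
from sklearn.model_selection import train_test_split
from sklearn.neighbors import KNeighborsClassifier

rng = np.random.default_rng(0)
X, y = load_iris(return_X_y=True)
X_tr, X_val, y_tr, y_val = train_test_split(X, y, test_size=0.3, random_state=0)
n_inst, n_feat = X_tr.shape

def fitness(ind):
    mask, w = ind["mask"], ind["weights"]
    if mask.sum() < 3:                       # need at least k instances for k-NN
        return 0.0
    knn = KNeighborsClassifier(n_neighbors=3)
    knn.fit(X_tr[mask] * w, y_tr[mask])      # feature weighting = scaling columns
    return knn.score(X_val * w, y_val)       # validation accuracy as fitness

def random_individual():
    return {"mask": rng.random(n_inst) < 0.5,     # random instance subset
            "weights": rng.random(n_feat)}        # random weights in [0, 1]

def crossover(a, b):
    cut = rng.integers(1, n_inst)                 # one-point crossover on the mask
    return {"mask": np.concatenate([a["mask"][:cut], b["mask"][cut:]]),
            "weights": (a["weights"] + b["weights"]) / 2.0}   # arithmetic crossover

def mutate(ind, p_bit=0.02, sigma=0.1):
    mask = ind["mask"] ^ (rng.random(n_inst) < p_bit)          # bit-flip mutation
    weights = np.clip(ind["weights"] + rng.normal(0, sigma, n_feat), 0, 1)
    return {"mask": mask, "weights": weights}

pop = [random_individual() for _ in range(30)]
for gen in range(50):
    pop.sort(key=fitness, reverse=True)
    parents = pop[:10]                            # truncation selection
    children = [mutate(crossover(parents[rng.integers(10)],
                                 parents[rng.integers(10)]))
                for _ in range(20)]
    pop = parents + children

best = max(pop, key=fitness)
print("validation accuracy:", fitness(best),
      "instances kept:", int(best["mask"].sum()))

The sketch folds both representations into a single evolutionary loop for brevity; the paper's exact division of labor between the GA and the ES is described in the full text.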
{"_id":"XQjQhX9JNcD9YKBnj","bibbaseid":"ramirezcruz-fuentes-alarconaquino-garciabanuelos-instanceselectionandfeatureweightingusingevolutionaryalgorithms-2006","downloads":0,"creationDate":"2018-11-16T03:23:24.304Z","title":"Instance Selection and Feature Weighting Using Evolutionary Algorithms","author_short":["Ramirez-Cruz, J.","Fuentes, O.","Alarcon-Aquino, V.","Garcia-Banuelos, L."],"year":2006,"bibtype":"inproceedings","biburl":"https://bibbase.org/service/mendeley/940dd160-7d67-3a5f-b9f8-935da0571367","bibdata":{"title":"Instance Selection and Feature Weighting Using Evolutionary Algorithms","type":"inproceedings","year":"2006","pages":"73-79","websites":"http://ieeexplore.ieee.org/document/4023791/","month":"11","publisher":"IEEE","id":"9d25b946-cb70-358e-972c-8796b7984187","created":"2022-08-29T17:43:37.369Z","file_attached":false,"profile_id":"940dd160-7d67-3a5f-b9f8-935da0571367","group_id":"92fccab2-8d44-33bc-b301-7b94bb18523c","last_modified":"2022-08-29T17:43:37.369Z","read":false,"starred":false,"authored":false,"confirmed":"true","hidden":false,"source_type":"CONF","private_publication":false,"abstract":"Machine learning algorithms are commonly used in real-world applications for solving complex problems where it is difficult to get a mathematical model. The goal of machine learning algorithms is to learn an objective function from a set of training examples where each example is defined by a feature set. Regularly, real world applications have many examples with many features; however, the objective function depends on few of them. The presence of noisy examples or irrelevant features in a dataset degrades the performance of machine learning algorithms; such is the case of k-nearest neighbor machine learning algorithm (k-NN). Thus choosing good instance and feature subsets may improve the algorithm's performance. Evolutionary algorithms proved to be good techniques for finding solutions in a large solution space and to be stable in the presence of noise. In this work, we address the problem of instance selection and feature weighting for instance-based methods by means of a genetic algorithm (GA) and evolution strategies (ES). We show that combining GA and ES with a k-NN algorithm can improve the predictive accuracy of the resulting classifier.","bibtype":"inproceedings","author":"Ramirez-Cruz, Jose-Federico and Fuentes, Olac and Alarcon-Aquino, Vicente and Garcia-Banuelos, Luciano","doi":"10.1109/CIC.2006.42","booktitle":"2006 15th International Conference on Computing","bibtex":"@inproceedings{\n title = {Instance Selection and Feature Weighting Using Evolutionary Algorithms},\n type = {inproceedings},\n year = {2006},\n pages = {73-79},\n websites = {http://ieeexplore.ieee.org/document/4023791/},\n month = {11},\n publisher = {IEEE},\n id = {9d25b946-cb70-358e-972c-8796b7984187},\n created = {2022-08-29T17:43:37.369Z},\n file_attached = {false},\n profile_id = {940dd160-7d67-3a5f-b9f8-935da0571367},\n group_id = {92fccab2-8d44-33bc-b301-7b94bb18523c},\n last_modified = {2022-08-29T17:43:37.369Z},\n read = {false},\n starred = {false},\n authored = {false},\n confirmed = {true},\n hidden = {false},\n source_type = {CONF},\n private_publication = {false},\n abstract = {Machine learning algorithms are commonly used in real-world applications for solving complex problems where it is difficult to get a mathematical model. The goal of machine learning algorithms is to learn an objective function from a set of training examples where each example is defined by a feature set. 
Regularly, real world applications have many examples with many features; however, the objective function depends on few of them. The presence of noisy examples or irrelevant features in a dataset degrades the performance of machine learning algorithms; such is the case of k-nearest neighbor machine learning algorithm (k-NN). Thus choosing good instance and feature subsets may improve the algorithm's performance. Evolutionary algorithms proved to be good techniques for finding solutions in a large solution space and to be stable in the presence of noise. In this work, we address the problem of instance selection and feature weighting for instance-based methods by means of a genetic algorithm (GA) and evolution strategies (ES). We show that combining GA and ES with a k-NN algorithm can improve the predictive accuracy of the resulting classifier.},\n bibtype = {inproceedings},\n author = {Ramirez-Cruz, Jose-Federico and Fuentes, Olac and Alarcon-Aquino, Vicente and Garcia-Banuelos, Luciano},\n doi = {10.1109/CIC.2006.42},\n booktitle = {2006 15th International Conference on Computing}\n}","author_short":["Ramirez-Cruz, J.","Fuentes, O.","Alarcon-Aquino, V.","Garcia-Banuelos, L."],"urls":{"Website":"http://ieeexplore.ieee.org/document/4023791/"},"biburl":"https://bibbase.org/service/mendeley/940dd160-7d67-3a5f-b9f8-935da0571367","bibbaseid":"ramirezcruz-fuentes-alarconaquino-garciabanuelos-instanceselectionandfeatureweightingusingevolutionaryalgorithms-2006","role":"author","metadata":{"authorlinks":{}},"downloads":0},"search_terms":["instance","selection","feature","weighting","using","evolutionary","algorithms","ramirez-cruz","fuentes","alarcon-aquino","garcia-banuelos"],"keywords":[],"authorIDs":[],"dataSources":["uTykyhFv6T7J2dX6z","ya2CyA73rpZseyrZ8","nyWSrx2yzJEsoe49A","2252seNhipfTmjEBQ"]}