Robust alternating AdaBoost. In Lecture Notes in Computer Science (including subseries Lecture Notes in Artificial Intelligence and Lecture Notes in Bioinformatics), volume 4756 LNCS, pages 427–436, 2007.
@inproceedings{38449112056,
  title     = {Robust alternating AdaBoost},
  booktitle = {Lecture Notes in Computer Science (including subseries Lecture Notes in Artificial Intelligence and Lecture Notes in Bioinformatics)},
  volume    = {4756 LNCS},
  pages     = {427--436},
  year      = {2007},
  abstract  = {Ensemble methods are general techniques to improve the accuracy of any given learning algorithm. Boosting is a learning algorithm that builds the classifier ensembles incrementally. In this work we propose an improvement of the classical and inverse AdaBoost algorithms to deal with the problem of the presence of outliers in the data. We propose the Robust Alternating AdaBoost (RADA) algorithm that alternates between the classic and inverse AdaBoost to create a more stable algorithm. The RADA algorithm bounds the influence of the outliers to the empirical distribution, it detects and diminishes the empirical probability of "bad" samples, and it performs a more accurate classification under contaminated data. We report the performance results using synthetic and real datasets, the latter obtained from a benchmark site. © Springer-Verlag Berlin Heidelberg 2007.}
}
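For readers who want a concrete picture of the alternation the abstract describes, below is a minimal illustrative sketch in Python. It is not the authors' published RADA procedure: the decision-stump weak learner, the even/odd alternation schedule, and the simple per-sample weight cap (standing in for the paper's bound on the empirical distribution) are all assumptions made here for illustration.

# Minimal sketch of the alternating idea; NOT the published RADA algorithm.
# Assumptions: labels in {-1, +1}, decision stumps as weak learners, a strict
# even/odd alternation schedule, and a crude per-sample weight cap.
import numpy as np
from sklearn.tree import DecisionTreeClassifier

def alternating_adaboost(X, y, n_rounds=20, weight_cap=0.1):
    """Toy alternating AdaBoost; expects labels y in {-1, +1}."""
    X, y = np.asarray(X), np.asarray(y)
    n = len(y)
    w = np.full(n, 1.0 / n)              # empirical distribution over samples
    learners, alphas = [], []
    for t in range(n_rounds):
        stump = DecisionTreeClassifier(max_depth=1)
        stump.fit(X, y, sample_weight=w)
        pred = stump.predict(X)
        err = np.clip(w[pred != y].sum(), 1e-10, 1 - 1e-10)
        alpha = 0.5 * np.log((1.0 - err) / err)
        # Even rounds: classic update (misclassified samples gain weight).
        # Odd rounds: inverted update (misclassified samples lose weight),
        # so persistently hard points (outlier suspects) lose influence.
        sign = 1.0 if t % 2 == 0 else -1.0
        w = w * np.exp(-sign * alpha * y * pred)
        w = np.minimum(w, weight_cap)    # crude stand-in for bounding the
        w = w / w.sum()                  # distribution, then renormalize
        learners.append(stump)
        alphas.append(alpha)
    def predict(Xq):
        score = sum(a * m.predict(np.asarray(Xq))
                    for a, m in zip(alphas, learners))
        return np.sign(score)
    return predict

The key contrast with classic AdaBoost is the sign flip on odd rounds: points that the ensemble keeps getting wrong are treated as outlier suspects and down-weighted rather than up-weighted, which is the stabilizing effect the abstract attributes to alternating between the classic and inverse variants.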