XGBoost: A Scalable Tree Boosting System. Chen, T. & Guestrin, C. In Proceedings of the 22nd ACM SIGKDD International Conference on Knowledge Discovery and Data Mining, KDD '16, pages 785–794, New York, NY, USA, August 2016. Association for Computing Machinery. Paper doi abstract bibtex Tree boosting is a highly effective and widely used machine learning method. In this paper, we describe a scalable end-to-end tree boosting system called XGBoost, which is used widely by data scientists to achieve state-of-the-art results on many machine learning challenges. We propose a novel sparsity-aware algorithm for sparse data and weighted quantile sketch for approximate tree learning. More importantly, we provide insights on cache access patterns, data compression and sharding to build a scalable tree boosting system. By combining these insights, XGBoost scales beyond billions of examples using far fewer resources than existing systems.
@inproceedings{chen_xgboost_2016,
address = {New York, NY, USA},
series = {{KDD} '16},
title = {{XGBoost}: {A} {Scalable} {Tree} {Boosting} {System}},
isbn = {978-1-4503-4232-2},
shorttitle = {{XGBoost}},
url = {https://doi.org/10.1145/2939672.2939785},
doi = {10.1145/2939672.2939785},
abstract = {Tree boosting is a highly effective and widely used machine learning method. In this paper, we describe a scalable end-to-end tree boosting system called XGBoost, which is used widely by data scientists to achieve state-of-the-art results on many machine learning challenges. We propose a novel sparsity-aware algorithm for sparse data and weighted quantile sketch for approximate tree learning. More importantly, we provide insights on cache access patterns, data compression and sharding to build a scalable tree boosting system. By combining these insights, XGBoost scales beyond billions of examples using far fewer resources than existing systems.},
urldate = {2022-08-22},
booktitle = {Proceedings of the 22nd {ACM} {SIGKDD} {International} {Conference} on {Knowledge} {Discovery} and {Data} {Mining}},
publisher = {Association for Computing Machinery},
author = {Chen, Tianqi and Guestrin, Carlos},
month = aug,
year = {2016},
keywords = {large-scale machine learning},
pages = {785--794},
}
Downloads: 0
{"_id":"6QP4KLmtx4ntWW8ZT","bibbaseid":"chen-guestrin-xgboostascalabletreeboostingsystem-2016","author_short":["Chen, T.","Guestrin, C."],"bibdata":{"bibtype":"inproceedings","type":"inproceedings","address":"New York, NY, USA","series":"KDD '16","title":"XGBoost: A Scalable Tree Boosting System","isbn":"978-1-4503-4232-2","shorttitle":"XGBoost","url":"https://doi.org/10.1145/2939672.2939785","doi":"10.1145/2939672.2939785","abstract":"Tree boosting is a highly effective and widely used machine learning method. In this paper, we describe a scalable end-to-end tree boosting system called XGBoost, which is used widely by data scientists to achieve state-of-the-art results on many machine learning challenges. We propose a novel sparsity-aware algorithm for sparse data and weighted quantile sketch for approximate tree learning. More importantly, we provide insights on cache access patterns, data compression and sharding to build a scalable tree boosting system. By combining these insights, XGBoost scales beyond billions of examples using far fewer resources than existing systems.","urldate":"2022-08-22","booktitle":"Proceedings of the 22nd ACM SIGKDD International Conference on Knowledge Discovery and Data Mining","publisher":"Association for Computing Machinery","author":[{"propositions":[],"lastnames":["Chen"],"firstnames":["Tianqi"],"suffixes":[]},{"propositions":[],"lastnames":["Guestrin"],"firstnames":["Carlos"],"suffixes":[]}],"month":"August","year":"2016","keywords":"large-scale machine learning","pages":"785–794","bibtex":"@inproceedings{chen_xgboost_2016,\n\taddress = {New York, NY, USA},\n\tseries = {{KDD} '16},\n\ttitle = {{XGBoost}: {A} {Scalable} {Tree} {Boosting} {System}},\n\tisbn = {978-1-4503-4232-2},\n\tshorttitle = {{XGBoost}},\n\turl = {https://doi.org/10.1145/2939672.2939785},\n\tdoi = {10.1145/2939672.2939785},\n\tabstract = {Tree boosting is a highly effective and widely used machine learning method. In this paper, we describe a scalable end-to-end tree boosting system called XGBoost, which is used widely by data scientists to achieve state-of-the-art results on many machine learning challenges. We propose a novel sparsity-aware algorithm for sparse data and weighted quantile sketch for approximate tree learning. More importantly, we provide insights on cache access patterns, data compression and sharding to build a scalable tree boosting system. By combining these insights, XGBoost scales beyond billions of examples using far fewer resources than existing systems.},\n\turldate = {2022-08-22},\n\tbooktitle = {Proceedings of the 22nd {ACM} {SIGKDD} {International} {Conference} on {Knowledge} {Discovery} and {Data} {Mining}},\n\tpublisher = {Association for Computing Machinery},\n\tauthor = {Chen, Tianqi and Guestrin, Carlos},\n\tmonth = aug,\n\tyear = {2016},\n\tkeywords = {large-scale machine learning},\n\tpages = {785--794},\n}\n\n\n\n","author_short":["Chen, T.","Guestrin, C."],"key":"chen_xgboost_2016","id":"chen_xgboost_2016","bibbaseid":"chen-guestrin-xgboostascalabletreeboostingsystem-2016","role":"author","urls":{"Paper":"https://doi.org/10.1145/2939672.2939785"},"keyword":["large-scale machine learning"],"metadata":{"authorlinks":{}},"html":""},"bibtype":"inproceedings","biburl":"https://bibbase.org/zotero/mh_lenguyen","dataSources":["iwKepCrWBps7ojhDx"],"keywords":["large-scale machine learning"],"search_terms":["xgboost","scalable","tree","boosting","system","chen","guestrin"],"title":"XGBoost: A Scalable Tree Boosting System","year":2016}