Decision Trees for Mining Data Streams Based on the McDiarmid's Bound. Rutkowski, L., Pietruczuk, L., Duda, P., & Jaworski, M. IEEE Transactions on Knowledge and Data Engineering, 25(6):1272–1279, June 2013. doi: 10.1109/TKDE.2012.66

Abstract: In mining data streams the most popular tool is the Hoeffding tree algorithm. It uses Hoeffding's bound to determine the smallest number of examples needed at a node to select a splitting attribute. In the literature the same Hoeffding's bound was used for any evaluation function (heuristic measure), e.g., information gain or the Gini index. In this paper, it is shown that Hoeffding's inequality is not appropriate for the underlying problem. We prove two theorems presenting McDiarmid's bound for both the information gain, used in the ID3 algorithm, and the Gini index, used in the Classification and Regression Trees (CART) algorithm. The results of the paper guarantee that a decision tree learning system, applied to data streams and based on McDiarmid's bound, produces output nearly identical to that of a conventional learner. These results have a significant impact on the state of the art of mining data streams, and various methods and algorithms developed so far should be reconsidered.
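To make the abstract's contrast between the two bounds concrete, the standard textbook statements of both inequalities are sketched below in LaTeX. These are generic forms with generic notation (range R, sample size n, confidence parameter δ, bounded-difference constants c_i); they are not reproduced from the paper, whose contribution is the derivation of McDiarmid-type constants specific to information gain and the Gini index.

    % Hoeffding's bound: for the mean of n independent observations of a
    % bounded random variable with range R, the deviation from its
    % expectation exceeds \epsilon with probability at most \delta once
    \epsilon = \sqrt{\frac{R^{2}\,\ln(1/\delta)}{2n}}

    % McDiarmid's inequality: for a function f of independent inputs
    % X_1, ..., X_n whose value changes by at most c_i when the i-th
    % input is replaced (the bounded-differences condition),
    \Pr\bigl( f(X_1,\dots,X_n) - \mathbb{E}[f(X_1,\dots,X_n)] \ge \epsilon \bigr)
        \le \exp\!\left( -\frac{2\epsilon^{2}}{\sum_{i=1}^{n} c_{i}^{2}} \right)

The relevant distinction is that Hoeffding's bound applies to sums and averages of bounded random variables, whereas split measures such as information gain and the Gini index are more general functions of the sample; McDiarmid's inequality is the standard concentration tool for that setting, which is why the paper derives measure-specific bounds of this form.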
@article{rutkowski_decision_2013,
title = {Decision {Trees} for {Mining} {Data} {Streams} {Based} on the {McDiarmid}'s {Bound}},
volume = {25},
issn = {1558-2191},
doi = {10.1109/TKDE.2012.66},
abstract = {In mining data streams the most popular tool is the Hoeffding tree algorithm. It uses the Hoeffding's bound to determine the smallest number of examples needed at a node to select a splitting attribute. In the literature the same Hoeffding's bound was used for any evaluation function (heuristic measure), e.g., information gain or Gini index. In this paper, it is shown that the Hoeffding's inequality is not appropriate to solve the underlying problem. We prove two theorems presenting the McDiarmid's bound for both the information gain, used in ID3 algorithm, and for Gini index, used in Classification and Regression Trees (CART) algorithm. The results of the paper guarantee that a decision tree learning system, applied to data streams and based on the McDiarmid's bound, has the property that its output is nearly identical to that of a conventional learner. The results of the paper have a great impact on the state of the art of mining data streams and various developed so far methods and algorithms should be reconsidered.},
number = {6},
journal = {IEEE Transactions on Knowledge and Data Engineering},
author = {Rutkowski, Leszek and Pietruczuk, Lena and Duda, Piotr and Jaworski, Maciej},
month = jun,
year = {2013},
keywords = {Data mining, Data streams, Decision trees, Entropy, Gain measurement, Gini index, Hoeffding's bound, Indexes, Learning systems, McDiarmid's bound, Random variables, information gain},
pages = {1272--1279},
}
{"_id":"AXAYNb3c8kTFQKCLB","bibbaseid":"rutkowski-pietruczuk-duda-jaworski-decisiontreesforminingdatastreamsbasedonthemcdiarmidsbound-2013","author_short":["Rutkowski, L.","Pietruczuk, L.","Duda, P.","Jaworski, M."],"bibdata":{"bibtype":"article","type":"article","title":"Decision Trees for Mining Data Streams Based on the McDiarmid's Bound","volume":"25","issn":"1558-2191","doi":"10.1109/TKDE.2012.66","abstract":"In mining data streams the most popular tool is the Hoeffding tree algorithm. It uses the Hoeffding's bound to determine the smallest number of examples needed at a node to select a splitting attribute. In the literature the same Hoeffding's bound was used for any evaluation function (heuristic measure), e.g., information gain or Gini index. In this paper, it is shown that the Hoeffding's inequality is not appropriate to solve the underlying problem. We prove two theorems presenting the McDiarmid's bound for both the information gain, used in ID3 algorithm, and for Gini index, used in Classification and Regression Trees (CART) algorithm. The results of the paper guarantee that a decision tree learning system, applied to data streams and based on the McDiarmid's bound, has the property that its output is nearly identical to that of a conventional learner. The results of the paper have a great impact on the state of the art of mining data streams and various developed so far methods and algorithms should be reconsidered.","number":"6","journal":"IEEE Transactions on Knowledge and Data Engineering","author":[{"propositions":[],"lastnames":["Rutkowski"],"firstnames":["Leszek"],"suffixes":[]},{"propositions":[],"lastnames":["Pietruczuk"],"firstnames":["Lena"],"suffixes":[]},{"propositions":[],"lastnames":["Duda"],"firstnames":["Piotr"],"suffixes":[]},{"propositions":[],"lastnames":["Jaworski"],"firstnames":["Maciej"],"suffixes":[]}],"month":"June","year":"2013","note":"Conference Name: IEEE Transactions on Knowledge and Data Engineering","keywords":"Data mining, Data streams, Decision trees, Entropy, Gain measurement, Gini index, Hoeffding's bound, Indexes, Learning systems, McDiarmid's bound, Random variables, decision trees, information gain","pages":"1272–1279","bibtex":"@article{rutkowski_decision_2013,\n\ttitle = {Decision {Trees} for {Mining} {Data} {Streams} {Based} on the {McDiarmid}'s {Bound}},\n\tvolume = {25},\n\tissn = {1558-2191},\n\tdoi = {10.1109/TKDE.2012.66},\n\tabstract = {In mining data streams the most popular tool is the Hoeffding tree algorithm. It uses the Hoeffding's bound to determine the smallest number of examples needed at a node to select a splitting attribute. In the literature the same Hoeffding's bound was used for any evaluation function (heuristic measure), e.g., information gain or Gini index. In this paper, it is shown that the Hoeffding's inequality is not appropriate to solve the underlying problem. We prove two theorems presenting the McDiarmid's bound for both the information gain, used in ID3 algorithm, and for Gini index, used in Classification and Regression Trees (CART) algorithm. The results of the paper guarantee that a decision tree learning system, applied to data streams and based on the McDiarmid's bound, has the property that its output is nearly identical to that of a conventional learner. 
The results of the paper have a great impact on the state of the art of mining data streams and various developed so far methods and algorithms should be reconsidered.},\n\tnumber = {6},\n\tjournal = {IEEE Transactions on Knowledge and Data Engineering},\n\tauthor = {Rutkowski, Leszek and Pietruczuk, Lena and Duda, Piotr and Jaworski, Maciej},\n\tmonth = jun,\n\tyear = {2013},\n\tnote = {Conference Name: IEEE Transactions on Knowledge and Data Engineering},\n\tkeywords = {Data mining, Data streams, Decision trees, Entropy, Gain measurement, Gini index, Hoeffding's bound, Indexes, Learning systems, McDiarmid's bound, Random variables, decision trees, information gain},\n\tpages = {1272--1279},\n}\n\n\n\n","author_short":["Rutkowski, L.","Pietruczuk, L.","Duda, P.","Jaworski, M."],"key":"rutkowski_decision_2013","id":"rutkowski_decision_2013","bibbaseid":"rutkowski-pietruczuk-duda-jaworski-decisiontreesforminingdatastreamsbasedonthemcdiarmidsbound-2013","role":"author","urls":{},"keyword":["Data mining","Data streams","Decision trees","Entropy","Gain measurement","Gini index","Hoeffding's bound","Indexes","Learning systems","McDiarmid's bound","Random variables","decision trees","information gain"],"metadata":{"authorlinks":{}},"html":""},"bibtype":"article","biburl":"https://bibbase.org/zotero/mh_lenguyen","dataSources":["iwKepCrWBps7ojhDx"],"keywords":["data mining","data streams","decision trees","entropy","gain measurement","gini index","hoeffding's bound","indexes","learning systems","mcdiarmid's bound","random variables","decision trees","information gain"],"search_terms":["decision","trees","mining","data","streams","based","mcdiarmid","bound","rutkowski","pietruczuk","duda","jaworski"],"title":"Decision Trees for Mining Data Streams Based on the McDiarmid's Bound","year":2013}