CD-MOA: Change Detection Framework for Massive Online Analysis. Bifet, A., Read, J., Pfahringer, B., Holmes, G., & Žliobaitė, I. In Proceedings of the 12th International Symposium on Advances in Intelligent Data Analysis XII - Volume 8207, of IDA 2013, pages 92–103, Berlin, Heidelberg, October, 2013. Springer-Verlag. Paper doi abstract bibtex Analysis of data from networked digital information systems such as mobile devices, remote sensors, and streaming applications, needs to deal with two challenges: the size of data and the capacity to be adaptive to changes in concept in real-time. Many approaches meet the challenge by using an explicit change detector alongside a classification algorithm and then evaluate performance using classification accuracy. However, there is an unexpected connection between change detectors and classification methods that needs to be acknowledged. The phenomenon has been observed previously, connecting high classification performance with high false positive rates. The implication is that we need to be careful to evaluate systems against intended outcomes—high classification rates, low false alarm rates, compromises between the two and so forth. This paper proposes a new experimental framework for evaluating change detection methods against intended outcomes. The framework is general in the sense that it can be used with other data mining tasks such as frequent item and pattern mining, clustering etc. Included in the framework is a new measure of performance of a change detector that monitors the compromise between fast detection and false alarms. Using this new experimental framework we conduct an evaluation study on synthetic and real-world datasets to show that classification performance is indeed a poor proxy for change detection performance and provide further evidence that classification performance is correlated strongly with the use of change detectors that produce high false positive rates.
@inproceedings{bifet_cd-moa_2013,
address = {Berlin, Heidelberg},
series = {{IDA} 2013},
title = {{CD}-{MOA}: {Change} {Detection} {Framework} for {Massive} {Online} {Analysis}},
isbn = {978-3-642-41397-1},
shorttitle = {{CD}-{MOA}},
url = {https://doi.org/10.1007/978-3-642-41398-8_9},
doi = {10.1007/978-3-642-41398-8_9},
abstract = {Analysis of data from networked digital information systems such as mobile devices, remote sensors, and streaming applications, needs to deal with two challenges: the size of data and the capacity to be adaptive to changes in concept in real-time. Many approaches meet the challenge by using an explicit change detector alongside a classification algorithm and then evaluate performance using classification accuracy. However, there is an unexpected connection between change detectors and classification methods that needs to be acknowledged. The phenomenon has been observed previously, connecting high classification performance with high false positive rates. The implication is that we need to be careful to evaluate systems against intended outcomes---high classification rates, low false alarm rates, compromises between the two and so forth. This paper proposes a new experimental framework for evaluating change detection methods against intended outcomes. The framework is general in the sense that it can be used with other data mining tasks such as frequent item and pattern mining, clustering etc. Included in the framework is a new measure of performance of a change detector that monitors the compromise between fast detection and false alarms. Using this new experimental framework we conduct an evaluation study on synthetic and real-world datasets to show that classification performance is indeed a poor proxy for change detection performance and provide further evidence that classification performance is correlated strongly with the use of change detectors that produce high false positive rates.},
urldate = {2022-03-17},
booktitle = {Proceedings of the 12th {International} {Symposium} on {Advances} in {Intelligent} {Data} {Analysis} {XII} - {Volume} 8207},
publisher = {Springer-Verlag},
author = {Bifet, Albert and Read, Jesse and Pfahringer, Bernhard and Holmes, Geoff and Žliobaitė, Indrė},
month = oct,
year = {2013},
keywords = {data streams, dynamic, evolving, incremental, online},
pages = {92--103},
}
Downloads: 0
{"_id":"bGfNBkXbkMvPknmm8","bibbaseid":"bifet-read-pfahringer-holmes-liobait-cdmoachangedetectionframeworkformassiveonlineanalysis-2013","author_short":["Bifet, A.","Read, J.","Pfahringer, B.","Holmes, G.","źLiobaităź, I."],"bibdata":{"bibtype":"inproceedings","type":"inproceedings","address":"Berlin, Heidelberg","series":"IDA 2013","title":"CD-MOA: Change Detection Framework for Massive Online Analysis","isbn":"978-3-642-41397-1","shorttitle":"CD-MOA","url":"https://doi.org/10.1007/978-3-642-41398-8_9","doi":"10.1007/978-3-642-41398-8_9","abstract":"Analysis of data from networked digital information systems such as mobile devices, remote sensors, and streaming applications, needs to deal with two challenges: the size of data and the capacity to be adaptive to changes in concept in real-time. Many approaches meet the challenge by using an explicit change detector alongside a classification algorithm and then evaluate performance using classification accuracy. However, there is an unexpected connection between change detectors and classification methods that needs to be acknowledged. The phenomenon has been observed previously, connecting high classification performance with high false positive rates. The implication is that we need to be careful to evaluate systems against intended outcomes—high classification rates, low false alarm rates, compromises between the two and so forth. This paper proposes a new experimental framework for evaluating change detection methods against intended outcomes. The framework is general in the sense that it can be used with other data mining tasks such as frequent item and pattern mining, clustering etc. Included in the framework is a new measure of performance of a change detector that monitors the compromise between fast detection and false alarms. 
Using this new experimental framework we conduct an evaluation study on synthetic and real-world datasets to show that classification performance is indeed a poor proxy for change detection performance and provide further evidence that classification performance is correlated strongly with the use of change detectors that produce high false positive rates.","urldate":"2022-03-17","booktitle":"Proceedings of the 12th International Symposium on Advances in Intelligent Data Analysis XII - Volume 8207","publisher":"Springer-Verlag","author":[{"propositions":[],"lastnames":["Bifet"],"firstnames":["Albert"],"suffixes":[]},{"propositions":[],"lastnames":["Read"],"firstnames":["Jesse"],"suffixes":[]},{"propositions":[],"lastnames":["Pfahringer"],"firstnames":["Bernhard"],"suffixes":[]},{"propositions":[],"lastnames":["Holmes"],"firstnames":["Geoff"],"suffixes":[]},{"propositions":[],"lastnames":["źLiobaităź"],"firstnames":["Indrăź"],"suffixes":[]}],"month":"October","year":"2013","keywords":"data streams, dynamic, evolving, incremental, online","pages":"92–103","bibtex":"@inproceedings{bifet_cd-moa_2013,\n\taddress = {Berlin, Heidelberg},\n\tseries = {{IDA} 2013},\n\ttitle = {{CD}-{MOA}: {Change} {Detection} {Framework} for {Massive} {Online} {Analysis}},\n\tisbn = {978-3-642-41397-1},\n\tshorttitle = {{CD}-{MOA}},\n\turl = {https://doi.org/10.1007/978-3-642-41398-8_9},\n\tdoi = {10.1007/978-3-642-41398-8_9},\n\tabstract = {Analysis of data from networked digital information systems such as mobile devices, remote sensors, and streaming applications, needs to deal with two challenges: the size of data and the capacity to be adaptive to changes in concept in real-time. Many approaches meet the challenge by using an explicit change detector alongside a classification algorithm and then evaluate performance using classification accuracy. However, there is an unexpected connection between change detectors and classification methods that needs to be acknowledged. 
The phenomenon has been observed previously, connecting high classification performance with high false positive rates. The implication is that we need to be careful to evaluate systems against intended outcomes---high classification rates, low false alarm rates, compromises between the two and so forth. This paper proposes a new experimental framework for evaluating change detection methods against intended outcomes. The framework is general in the sense that it can be used with other data mining tasks such as frequent item and pattern mining, clustering etc. Included in the framework is a new measure of performance of a change detector that monitors the compromise between fast detection and false alarms. Using this new experimental framework we conduct an evaluation study on synthetic and real-world datasets to show that classification performance is indeed a poor proxy for change detection performance and provide further evidence that classification performance is correlated strongly with the use of change detectors that produce high false positive rates.},\n\turldate = {2022-03-17},\n\tbooktitle = {Proceedings of the 12th {International} {Symposium} on {Advances} in {Intelligent} {Data} {Analysis} {XII} - {Volume} 8207},\n\tpublisher = {Springer-Verlag},\n\tauthor = {Bifet, Albert and Read, Jesse and Pfahringer, Bernhard and Holmes, Geoff and źLiobaităź, Indrăź},\n\tmonth = oct,\n\tyear = {2013},\n\tkeywords = {data streams, dynamic, evolving, incremental, online},\n\tpages = {92--103},\n}\n\n\n\n","author_short":["Bifet, A.","Read, J.","Pfahringer, B.","Holmes, G.","źLiobaităź, I."],"key":"bifet_cd-moa_2013","id":"bifet_cd-moa_2013","bibbaseid":"bifet-read-pfahringer-holmes-liobait-cdmoachangedetectionframeworkformassiveonlineanalysis-2013","role":"author","urls":{"Paper":"https://doi.org/10.1007/978-3-642-41398-8_9"},"keyword":["data 
streams","dynamic","evolving","incremental","online"],"metadata":{"authorlinks":{}},"html":""},"bibtype":"inproceedings","biburl":"https://bibbase.org/zotero/mh_lenguyen","dataSources":["iwKepCrWBps7ojhDx"],"keywords":["data streams","dynamic","evolving","incremental","online"],"search_terms":["moa","change","detection","framework","massive","online","analysis","bifet","read","pfahringer","holmes","žliobaitė"],"title":"CD-MOA: Change Detection Framework for Massive Online Analysis","year":2013}