Incremental and decremental support vector machine learning. Cauwenberghs, G. & Poggio, T. In Proceedings of the 13th International Conference on Neural Information Processing Systems, NIPS'00, pages 388–394, Cambridge, MA, USA, January, 2000. MIT Press. abstract bibtex An on-line recursive algorithm for training support vector machines, one vector at a time, is presented. Adiabatic increments retain the Kuhn-Tucker conditions on all previously seen training data, in a number of steps each computed analytically. The incremental procedure is reversible, and decremental "unlearning" offers an efficient method to exactly evaluate leave-one-out generalization performance. Interpretation of decremental unlearning in feature space sheds light on the relationship between generalization and geometry of the data.
@inproceedings{cauwenberghs_incremental_2000,
address = {Cambridge, MA, USA},
series = {{NIPS}'00},
title = {Incremental and decremental support vector machine learning},
abstract = {An on-line recursive algorithm for training support vector machines, one vector at a time, is presented. Adiabatic increments retain the Kuhn-Tucker conditions on all previously seen training data, in a number of steps each computed analytically. The incremental procedure is reversible, and decremental "unlearning" offers an efficient method to exactly evaluate leave-one-out generalization performance. Interpretation of decremental unlearning in feature space sheds light on the relationship between generalization and geometry of the data.},
urldate = {2022-03-15},
booktitle = {Proceedings of the 13th {International} {Conference} on {Neural} {Information} {Processing} {Systems}},
publisher = {MIT Press},
author = {Cauwenberghs, Gert and Poggio, Tomaso},
month = jan,
year = {2000},
pages = {388--394},
}
Downloads: 0
{"_id":"k4J5CX2W8CAHyXWCz","bibbaseid":"cauwenberghs-poggio-incrementalanddecrementalsupportvectormachinelearning-2000","downloads":0,"creationDate":"2015-01-28T10:49:18.154Z","title":"Incremental and decremental support vector machine learning","author_short":["Cauwenberghs, G.","Poggio, T."],"year":2000,"bibtype":"inproceedings","biburl":"https://bibbase.org/zotero/mh_lenguyen","bibdata":{"bibtype":"inproceedings","type":"inproceedings","address":"Cambridge, MA, USA","series":"NIPS'00","title":"Incremental and decremental support vector machine learning","abstract":"An on-line recursive algorithm for training support vector machines, one vector at a time, is presented. Adiabatic increments retain the Kuhn-Tucker conditions on all previously seen training data, in a number of steps each computed analytically. The incremental procedure is reversible, and decremental \"unlearning\" offers an efficient method to exactly evaluate leave-one-out generalization performance. Interpretation of decremental unlearning in feature space sheds light on the relationship between generalization and geometry of the data.","urldate":"2022-03-15","booktitle":"Proceedings of the 13th International Conference on Neural Information Processing Systems","publisher":"MIT Press","author":[{"propositions":[],"lastnames":["Cauwenberghs"],"firstnames":["Gert"],"suffixes":[]},{"propositions":[],"lastnames":["Poggio"],"firstnames":["Tomaso"],"suffixes":[]}],"month":"January","year":"2000","pages":"388–394","bibtex":"@inproceedings{cauwenberghs_incremental_2000,\n\taddress = {Cambridge, MA, USA},\n\tseries = {{NIPS}'00},\n\ttitle = {Incremental and decremental support vector machine learning},\n\tabstract = {An on-line recursive algorithm for training support vector machines, one vector at a time, is presented. Adiabatic increments retain the Kuhn-Tucker conditions on all previously seen training data, in a number of steps each computed analytically. The incremental procedure is reversible, and decremental \"unlearning\" offers an efficient method to exactly evaluate leave-one-out generalization performance. Interpretation of decremental unlearning in feature space sheds light on the relationship between generalization and geometry of the data.},\n\turldate = {2022-03-15},\n\tbooktitle = {Proceedings of the 13th {International} {Conference} on {Neural} {Information} {Processing} {Systems}},\n\tpublisher = {MIT Press},\n\tauthor = {Cauwenberghs, Gert and Poggio, Tomaso},\n\tmonth = jan,\n\tyear = {2000},\n\tpages = {388--394},\n}\n\n\n\n","author_short":["Cauwenberghs, G.","Poggio, T."],"key":"cauwenberghs_incremental_2000","id":"cauwenberghs_incremental_2000","bibbaseid":"cauwenberghs-poggio-incrementalanddecrementalsupportvectormachinelearning-2000","role":"author","urls":{},"metadata":{"authorlinks":{}},"downloads":0,"html":""},"search_terms":["incremental","decremental","support","vector","machine","learning","cauwenberghs","poggio"],"keywords":[],"authorIDs":[],"dataSources":["FyP9yqWPBapLTBP2R","iwKepCrWBps7ojhDx"]}