From Margin to Sparsity. Graepel, T., Herbrich, R., & Williamson, R. C. In Advances in Neural Information Processing Systems 13, pages 210--216, Denver, 2000. The MIT Press. Paper abstract bibtex We present an improvement of Novikoff's perceptron convergence theorem. Reinterpreting this mistake bound as a margin dependent sparsity guarantee allows us to give a PAC-style generalisation error bound for the classifier learned by the dual perceptron learning algorithm. The bound value crucially depends on the margin a support vector machine would achieve on the same data set using the same kernel. Ironically, the bound yields better guarantees than are currently available for the support vector solution itself.
@inproceedings{DBLP:conf/nips/GraepelHW00,
  abstract      = {We present an improvement of Novikoff's perceptron convergence theorem. Reinterpreting this mistake bound as a margin dependent sparsity guarantee allows us to give a PAC-style generalisation error bound for the classifier learned by the dual perceptron learning algorithm. The bound value crucially depends on the margin a support vector machine would achieve on the same data set using the same kernel. Ironically, the bound yields better guarantees than are currently available for the support vector solution itself.},
  author        = {Graepel, Thore and Herbrich, Ralf and Williamson, Robert C.},
  title         = {From Margin to Sparsity},
  booktitle     = {Advances in Neural Information Processing Systems 13},
  pages         = {210--216},
  publisher     = {The MIT Press},
  address       = {Cambridge, MA},
  year          = {2000},
  url           = {http://www.herbrich.me/papers/perc.pdf},
  file          = {:Users/rherb/Dropbox/Documents/tex/nips2000/sparsity/perc.pdf:pdf},
  internal-note = {address is the publisher's city (MIT Press), not the NIPS 2000 venue (Denver); conference location deliberately omitted per BibTeX convention}
}
Downloads: 0
{"_id":{"_str":"51f5909c28e84a503b000132"},"__v":2,"authorIDs":[],"author_short":["Graepel, T.","Herbrich, R.","Williamson","C, R."],"bibbaseid":"graepel-herbrich--c-frommargintosparsity-2000","bibdata":{"html":"<div class=\"bibbase_paper\"> \n\n\n<span class=\"bibbase_paper_titleauthoryear\">\n\t<span class=\"bibbase_paper_title\"><a name=\"DBLP:conf/nips/GraepelHW00\"> </a>From Margin to Sparsity.</span>\n\t<span class=\"bibbase_paper_author\">\n<a class=\"bibbase author link\" href=\"http://bibbase.org/show?bib=data.bibbase.org/author/thomas-borchert/?format=bibtex\">Graepel, T.</a>; <a class=\"bibbase author link\" href=\"http://www.herbrich.me/\">Herbrich, R.</a>; Williamson; and C, R.</span>\n\t<!-- <span class=\"bibbase_paper_year\">2000</span>. -->\n</span>\n\n\n\nIn\n<i>Advances in Neural Information Processing Systems 13</i>, page 210--216, Denver, 2000.\n\n\nThe MIT Press.\n\n\n\n\n<br class=\"bibbase_paper_content\"/>\n\n<span class=\"bibbase_paper_content\">\n \n \n <!-- <i -->\n <!-- onclick=\"javascript:log_download('graepel-herbrich--c-frommargintosparsity-2000', 'http://www.herbrich.me/papers/perc.pdf')\">DEBUG -->\n <!-- </i> -->\n\n <a href=\"http://www.herbrich.me/papers/perc.pdf\"\n onclick=\"javascript:log_download('graepel-herbrich--c-frommargintosparsity-2000', 'http://www.herbrich.me/papers/perc.pdf')\">\n <img src=\"http://www.bibbase.org/img/filetypes/pdf.png\"\n\t alt=\"From Margin to Sparsity [.pdf]\" \n\t class=\"bibbase_icon\"\n\t style=\"width: 24px; height: 24px; border: 0px; vertical-align: text-top\" ><span class=\"bibbase_icon_text\">Paper</span></a> \n \n \n \n <a href=\"javascript:showBib('DBLP:conf/nips/GraepelHW00')\"\n class=\"bibbase link\">\n <!-- <img src=\"http://www.bibbase.org/img/filetypes/bib.png\" -->\n\t<!-- alt=\"From Margin to Sparsity [bib]\" -->\n\t<!-- class=\"bibbase_icon\" -->\n\t<!-- style=\"width: 24px; height: 24px; border: 0px; vertical-align: text-top\"><span class=\"bibbase_icon_text\">Bibtex</span> 
-->\n BibTeX\n <i class=\"fa fa-caret-down\"></i></a>\n \n \n \n <a class=\"bibbase_abstract_link bibbase link\"\n href=\"javascript:showAbstract('DBLP:conf/nips/GraepelHW00')\">\n Abstract\n <i class=\"fa fa-caret-down\"></i></a>\n \n \n \n\n \n \n \n</span>\n\n<div class=\"well well-small bibbase\" id=\"bib_DBLP_conf_nips_GraepelHW00\"\n style=\"display:none\">\n <pre>@inproceedings{ DBLP:conf/nips/GraepelHW00,\n abstract = {We present an improvement of Novikoff's perceptron convergence theorem. Reinterpreting this mistake bound as a margin dependent sparsity guarantee allows us to give a PAC-style generalisation error bound for the classifier learned by the dual perceptron learning algorithm. The bound value crucially depends on the margin a support vector machine would achieve on the same data set using the same kernel. Ironically, the bound yields better guarantees than are currently available for the support vector solution itself.},\n address = {Denver},\n author = {Graepel, Thore and Herbrich, Ralf and Williamson, Robert C},\n booktitle = {Advances in Neural Information Processing Systems 13},\n file = {:Users/rherb/Dropbox/Documents/tex/nips2000/sparsity/perc.pdf:pdf},\n pages = {210--216},\n publisher = {The MIT Press},\n title = {{From Margin to Sparsity}},\n url = {http://www.herbrich.me/papers/perc.pdf},\n year = {2000}\n}</pre>\n</div>\n\n\n<div class=\"well well-small bibbase\" id=\"abstract_DBLP_conf_nips_GraepelHW00\"\n style=\"display:none\">\n We present an improvement of Novikoff's perceptron convergence theorem. Reinterpreting this mistake bound as a margin dependent sparsity guarantee allows us to give a PAC-style generalisation error bound for the classifier learned by the dual perceptron learning algorithm. The bound value crucially depends on the margin a support vector machine would achieve on the same data set using the same kernel. 
Ironically, the bound yields better guarantees than are currently available for the support vector solution itself.\n</div>\n\n\n</div>\n","downloads":0,"abstract":"We present an improvement of Novikoff's perceptron convergence theorem. Reinterpreting this mistake bound as a margin dependent sparsity guarantee allows us to give a PAC-style generalisation error bound for the classifier learned by the dual perceptron learning algorithm. The bound value crucially depends on the margin a support vector machine would achieve on the same data set using the same kernel. Ironically, the bound yields better guarantees than are currently available for the support vector solution itself.","address":"Denver","author":["Graepel, Thore","Herbrich, Ralf","Williamson","C, Robert"],"author_short":["Graepel, T.","Herbrich, R.","Williamson","C, R."],"bibtex":"@inproceedings{ DBLP:conf/nips/GraepelHW00,\n abstract = {We present an improvement of Novikoff's perceptron convergence theorem. Reinterpreting this mistake bound as a margin dependent sparsity guarantee allows us to give a PAC-style generalisation error bound for the classifier learned by the dual perceptron learning algorithm. The bound value crucially depends on the margin a support vector machine would achieve on the same data set using the same kernel. 
Ironically, the bound yields better guarantees than are currently available for the support vector solution itself.},\n address = {Denver},\n author = {Graepel, Thore and Herbrich, Ralf and Williamson, Robert C},\n booktitle = {Advances in Neural Information Processing Systems 13},\n file = {:Users/rherb/Dropbox/Documents/tex/nips2000/sparsity/perc.pdf:pdf},\n pages = {210--216},\n publisher = {The MIT Press},\n title = {{From Margin to Sparsity}},\n url = {http://www.herbrich.me/papers/perc.pdf},\n year = {2000}\n}","bibtype":"inproceedings","booktitle":"Advances in Neural Information Processing Systems 13","file":":Users/rherb/Dropbox/Documents/tex/nips2000/sparsity/perc.pdf:pdf","id":"DBLP:conf/nips/GraepelHW00","key":"DBLP:conf/nips/GraepelHW00","pages":"210--216","publisher":"The MIT Press","title":"From Margin to Sparsity","type":"inproceedings","url":"http://www.herbrich.me/papers/perc.pdf","year":"2000","role":"author","urls":{"Paper":"http://www.herbrich.me/papers/perc.pdf"},"bibbaseid":"graepel-herbrich--c-frommargintosparsity-2000"},"bibtype":"inproceedings","biburl":"http://herbrich.me/bib/herbrich.bib","downloads":0,"search_terms":["margin","sparsity","graepel","herbrich","williamson","c"],"title":"From Margin to Sparsity","title_words":["margin","sparsity"],"year":2000,"dataSources":["y2DvMgAcqeDpXQ6ds"]}