A Training Algorithm for Optimal Margin Classifiers. Boser, B. E., Guyon, I. M., & Vapnik, V. N. In Proceedings of the Fifth Annual Workshop on Computational Learning Theory (COLT), pages 144-152, 1992.
A Training Algorithm for Optimal Margin Classifiers [link]Website  abstract   bibtex   
A training algorithm that maximizes the margin between the training patterns and the decision boundary is presented. The technique is applicable to a wide variety of the classification functions, including Perceptrons, polynomials, and Radial Basis Functions. The effective number of parameters is adjusted automatically to match the complexity of the problem. The solution is expressed as a linear combination of supporting patterns. These are the subset of training patterns that are closest to the decision boundary. Bounds on the generalization performance based on the leave-one-out method and the VC-dimension are given. Experimental results on optical character recognition problems demonstrate the good generalization obtained when compared with other learning algorithms.
% Cleaned Mendeley auto-export: restored the missing citation key (taken from the
% exporter's citation_key field), removed broken/internal exporter fields
% (identifiers = {[object Object]}, profile/group ids, read/starred flags, ...),
% moved the DOI out of the non-standard `websites` field, and fixed the page range.
@inproceedings{boser:kerneltrick,
  author    = {Boser, Bernhard E. and Guyon, Isabelle M. and Vapnik, Vladimir N.},
  title     = {A Training Algorithm for Optimal Margin Classifiers},
  booktitle = {Proceedings of the Fifth Annual Workshop on Computational Learning Theory (COLT)},
  year      = {1992},
  pages     = {144--152},
  doi       = {10.1145/130385.130401},
  abstract  = {A training algorithm that maximizes the margin between the training patterns and the decision boundary is presented. The technique is applicable to a wide variety of the classification functions, including Perceptrons, polynomials, and Radial Basis Functions. The effective number of parameters is adjusted automatically to match the complexity of the problem. The solution is expressed as a linear combination of supporting patterns. These are the subset of training patterns that are closest to the decision boundary. Bounds on the generalization performance based on the leave-one-out method and the VC-dimension are given. Experimental results on optical character recognition problems demonstrate the good generalization obtained when compared with other learning algorithms.},
}
Downloads: 0