Support-vector networks. Cortes, C. & Vapnik, V. Machine Learning, 20(3):273–297, 1995.
Abstract: The support-vector network is a new learning machine for two-group classification problems. The machine conceptually implements the following idea: input vectors are non-linearly mapped to a very high-dimension feature space. In this feature space a linear decision surface is constructed. Special properties of the decision surface ensure high generalization ability of the learning machine. The idea behind the support-vector network was previously implemented for the restricted case where the training data can be separated without errors. We here extend this result to non-separable training data.
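For context, the extension to non-separable data described in the abstract is what is now called the soft-margin formulation. A minimal sketch in standard textbook notation (the slack variables \xi_i, penalty parameter C, and feature map \phi are the usual symbols, not quoted from the paper itself):

\begin{aligned}
\min_{w,\, b,\, \xi} \quad & \tfrac{1}{2} \lVert w \rVert^{2} + C \sum_{i=1}^{\ell} \xi_i \\
\text{subject to} \quad & y_i \left( w \cdot \phi(x_i) + b \right) \ge 1 - \xi_i, \qquad \xi_i \ge 0, \quad i = 1, \dots, \ell,
\end{aligned}

where \phi is the non-linear map into the high-dimension feature space and w \cdot \phi(x) + b = 0 is the linear decision surface constructed there; setting all \xi_i = 0 recovers the earlier separable case the abstract mentions.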
@article{cortes-vapnik-ml95,
title = {Support-vector networks},
volume = {20},
issn = {1573-0565},
url = {https://doi.org/10.1007/BF00994018},
doi = {10.1007/BF00994018},
abstract = {The support-vector network is a new learning machine for two-group classification problems. The machine conceptually implements the following idea: input vectors are non-linearly mapped to a very high-dimension feature space. In this feature space a linear decision surface is constructed. Special properties of the decision surface ensures high generalization ability of the learning machine. The idea behind the support-vector network was previously implemented for the restricted case where the training data can be separated without errors. We here extend this result to non-separable training data.},
pages = {273--297},
number = {3},
journaltitle = {Machine Learning},
shortjournal = {Mach Learn},
author = {Cortes, Corinna and Vapnik, Vladimir},
urldate = {2024-09-24},
date = {1995-09-01},
langid = {english},
keywords = {Artificial Intelligence, efficient learning algorithms, neural networks, pattern recognition, polynomial classifiers, radial basis function classifiers},
}
{"_id":"Xd9fWkNNkXqXuLTXL","bibbaseid":"cortes-vapnik-supportvectornetworks","author_short":["Cortes, C.","Vapnik, V."],"bibdata":{"bibtype":"article","type":"article","title":"Support-vector networks","volume":"20","issn":"1573-0565","url":"https://doi.org/10.1007/BF00994018","doi":"10.1007/BF00994018","abstract":"Thesupport-vector network is a new learning machine for two-group classification problems. The machine conceptually implements the following idea: input vectors are non-linearly mapped to a very high-dimension feature space. In this feature space a linear decision surface is constructed. Special properties of the decision surface ensures high generalization ability of the learning machine. The idea behind the support-vector network was previously implemented for the restricted case where the training data can be separated without errors. We here extend this result to non-separable training data.","pages":"273–297","number":"3","journaltitle":"Machine Learning","shortjournal":"Mach Learn","author":[{"propositions":[],"lastnames":["Cortes"],"firstnames":["Corinna"],"suffixes":[]},{"propositions":[],"lastnames":["Vapnik"],"firstnames":["Vladimir"],"suffixes":[]}],"urldate":"2024-09-24","date":"1995-09-01","langid":"english","keywords":"Artificial Intelligence, efficient learning algorithms, neural networks, pattern recognition, polynomial classifiers, radial basis function classifiers","file":"Full Text PDF:/Users/ukreddy/Zotero/storage/QFKEKP5G/Cortes and Vapnik - 1995 - Support-vector networks.pdf:application/pdf","bibtex":"@article{cortes-vapnik-ml95,\n\ttitle = {Support-vector networks},\n\tvolume = {20},\n\tissn = {1573-0565},\n\turl = {https://doi.org/10.1007/BF00994018},\n\tdoi = {10.1007/BF00994018},\n\tabstract = {Thesupport-vector network is a new learning machine for two-group classification problems. The machine conceptually implements the following idea: input vectors are non-linearly mapped to a very high-dimension feature space. In this feature space a linear decision surface is constructed. Special properties of the decision surface ensures high generalization ability of the learning machine. The idea behind the support-vector network was previously implemented for the restricted case where the training data can be separated without errors. 
We here extend this result to non-separable training data.},\n\tpages = {273--297},\n\tnumber = {3},\n\tjournaltitle = {Machine Learning},\n\tshortjournal = {Mach Learn},\n\tauthor = {Cortes, Corinna and Vapnik, Vladimir},\n\turldate = {2024-09-24},\n\tdate = {1995-09-01},\n\tlangid = {english},\n\tkeywords = {Artificial Intelligence, efficient learning algorithms, neural networks, pattern recognition, polynomial classifiers, radial basis function classifiers},\n\tfile = {Full Text PDF:/Users/ukreddy/Zotero/storage/QFKEKP5G/Cortes and Vapnik - 1995 - Support-vector networks.pdf:application/pdf},\n}\n\n\n","author_short":["Cortes, C.","Vapnik, V."],"key":"cortes-vapnik-ml95","id":"cortes-vapnik-ml95","bibbaseid":"cortes-vapnik-supportvectornetworks","role":"author","urls":{"Paper":"https://doi.org/10.1007/BF00994018"},"keyword":["Artificial Intelligence","efficient learning algorithms","neural networks","pattern recognition","polynomial classifiers","radial basis function classifiers"],"metadata":{"authorlinks":{}},"downloads":0,"html":""},"bibtype":"article","biburl":"https://bibbase.org/network/files/ERQPGxTT5rBCWeLFE","dataSources":["TJKHZ3TN7ogruuAa6"],"keywords":["artificial intelligence","efficient learning algorithms","neural networks","pattern recognition","polynomial classifiers","radial basis function classifiers"],"search_terms":["support","vector","networks","cortes","vapnik"],"title":"Support-vector networks","year":null}