VC dimension of neural networks. Sontag, E. In Neural Networks and Machine Learning, pages 69-95. Springer, Berlin, 1998. Abstract: The Vapnik-Chervonenkis (VC) dimension is an integer which helps to characterize distribution-independent learning of binary concepts from positive and negative samples. This paper, based on lectures delivered at the Isaac Newton Institute in August of 1997, presents a brief introduction, establishes various elementary results, and discusses how to estimate the VC dimension in several examples of interest in neural network theory. (It does not address the learning and estimation-theoretic applications of VC dimension, and the applications to uniform convergence theorems for empirical probabilities, for which many suitable references are available.)
@INCOLLECTION{newtonVC,
AUTHOR = {E.D. Sontag},
BOOKTITLE = {Neural Networks and Machine Learning},
PUBLISHER = {Springer, Berlin},
TITLE = {VC dimension of neural networks},
YEAR = {1998},
OPTADDRESS = {},
OPTCHAPTER = {},
OPTCROSSREF = {},
OPTEDITION = {},
EDITOR = {C.M. Bishop},
OPTMONTH = {},
OPTNOTE = {},
OPTNUMBER = {},
PAGES = {69--95},
OPTSERIES = {},
OPTTYPE = {},
OPTVOLUME = {},
KEYWORDS = {neural networks, VC dimension, learning, shattering},
PDF = {../../FTPDIR/vc-expo.pdf},
ABSTRACT = { The Vapnik-Chervonenkis (VC) dimension is an integer
which helps to characterize distribution-independent learning of
binary concepts from positive and negative samples. This paper, based
on lectures delivered at the Isaac Newton Institute in August of
1997, presents a brief introduction, establishes various elementary
results, and discusses how to estimate the VC dimension in several
examples of interest in neural network theory. (It does not address
the learning and estimation-theoretic applications of VC dimension,
and the applications to uniform convergence theorems for empirical
probabilities, for which many suitable references are available.) }
}
Downloads: 0
{"_id":"Gs2RoGKngcB4qZGFm","bibbaseid":"sontag-vcdimensionofneuralnetworks-1998","downloads":0,"creationDate":"2018-10-18T05:07:06.405Z","title":"VC dimension of neural networks","author_short":["Sontag, E."],"year":1998,"bibtype":"incollection","biburl":"http://www.sontaglab.org/PUBDIR/Biblio/complete-bibliography.bib","bibdata":{"bibtype":"incollection","type":"incollection","author":[{"firstnames":["E.D."],"propositions":[],"lastnames":["Sontag"],"suffixes":[]}],"booktitle":"Neural Networks and Machine Learning","publisher":"Springer, Berlin","title":"VC dimension of neural networks","year":"1998","optaddress":"","optchapter":"","optcrossref":"","optedition":"","editor":[{"firstnames":["C.M."],"propositions":[],"lastnames":["Bishop"],"suffixes":[]}],"optmonth":"","optnote":"","optnumber":"","pages":"69-95","optseries":"","opttype":"","optvolume":"","keywords":"neural networks, VC dimension, learning, neural networks, shattering","pdf":"../../FTPDIR/vc-expo.pdf","abstract":"The Vapnik-Chervonenkis (VC) dimension is an integer which helps to characterize distribution-independent learning of binary concepts from positive and negative samples. This paper, based on lectures delivered at the Isaac Newton Institute in August of 1997, presents a brief introduction, establishes various elementary results, and discusses how to estimate the VC dimension in several examples of interest in neural network theory. (It does not address the learning and estimation-theoretic applications of VC dimension, and the applications to uniform convergence theorems for empirical probabilities, for which many suitable references are available.) ","bibtex":"@INCOLLECTION{newtonVC,\n AUTHOR = {E.D. Sontag},\n BOOKTITLE = {Neural Networks and Machine Learning},\n PUBLISHER = {Springer, Berlin},\n TITLE = {VC dimension of neural networks},\n YEAR = {1998},\n OPTADDRESS = {},\n OPTCHAPTER = {},\n OPTCROSSREF = {},\n OPTEDITION = {},\n EDITOR = {C.M. 
Bishop},\n OPTMONTH = {},\n OPTNOTE = {},\n OPTNUMBER = {},\n PAGES = {69-95},\n OPTSERIES = {},\n OPTTYPE = {},\n OPTVOLUME = {},\n KEYWORDS = {neural networks, VC dimension, learning, \n neural networks, shattering},\n PDF = {../../FTPDIR/vc-expo.pdf},\n ABSTRACT = { The Vapnik-Chervonenkis (VC) dimension is an integer \n which helps to characterize distribution-independent learning of \n binary concepts from positive and negative samples. This paper, based \n on lectures delivered at the Isaac Newton Institute in August of \n 1997, presents a brief introduction, establishes various elementary \n results, and discusses how to estimate the VC dimension in several \n examples of interest in neural network theory. (It does not address \n the learning and estimation-theoretic applications of VC dimension, \n and the applications to uniform convergence theorems for empirical \n probabilities, for which many suitable references are available.) }\n}\n\n","author_short":["Sontag, E."],"editor_short":["Bishop, C."],"key":"newtonVC","id":"newtonVC","bibbaseid":"sontag-vcdimensionofneuralnetworks-1998","role":"author","urls":{},"keyword":["neural networks","VC dimension","learning","neural networks","shattering"],"downloads":0,"html":""},"search_terms":["dimension","neural","networks","sontag"],"keywords":["neural networks","vc dimension","learning","neural networks","shattering"],"authorIDs":[],"dataSources":["DKqZbTmd7peqE4THw"]}