Vapnik-Chervonenkis dimension of recurrent neural networks. Koiran, P. & Sontag, E. Discrete Appl. Math., 86(1):63–79, Elsevier Science Publishers B. V., Amsterdam, The Netherlands, 1998. doi: 10.1016/S0166-218X(98)00014-6

Abstract: This paper provides lower and upper bounds for the VC dimension of recurrent networks. Several types of activation functions are discussed, including threshold, polynomial, piecewise-polynomial, and sigmoidal functions. The bounds depend on two independent parameters: the number w of weights in the network and the length k of the input sequence. Ignoring multiplicative constants, the main results say roughly the following: 1. For architectures whose activation is any fixed nonlinear polynomial, the VC dimension is proportional to wk. 2. For architectures whose activation is any fixed piecewise polynomial, the VC dimension is between wk and w^2 k. 3. For architectures with threshold activations, the VC dimension is between w log(k/w) and the smaller of wk log(wk) and w^2 + w log(wk). 4. For the standard sigmoid tanh(x), the VC dimension is between wk and w^4 k^2.
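As a reading aid (a paraphrase of the abstract, not part of the original entry), the four results can be restated in asymptotic notation, with w the number of weights and k the input-sequence length, all bounds up to the multiplicative constants the abstract ignores:

\documentclass{article}
\usepackage{amsmath}
\begin{document}
% Bounds from the abstract; w = number of weights,
% k = length of the input sequence.
\begin{align*}
\text{fixed nonlinear polynomial:}\quad
  & \operatorname{VCdim} = \Theta(wk)\\
\text{fixed piecewise polynomial:}\quad
  & \Omega(wk) \le \operatorname{VCdim} \le O(w^{2}k)\\
\text{threshold:}\quad
  & \Omega\bigl(w\log(k/w)\bigr) \le \operatorname{VCdim}
    \le O\bigl(\min\{\,wk\log(wk),\; w^{2}+w\log(wk)\,\}\bigr)\\
\text{standard sigmoid }\tanh(x)\text{:}\quad
  & \Omega(wk) \le \operatorname{VCdim} \le O(w^{4}k^{2})
\end{align*}
\end{document}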
@ARTICLE{MR1634875,
AUTHOR = {P. Koiran and E.D. Sontag},
JOURNAL = {Discrete Appl. Math.},
TITLE = {Vapnik-Chervonenkis dimension of recurrent neural
networks},
YEAR = {1998},
NUMBER = {1},
PAGES = {63--79},
VOLUME = {86},
   ADDRESS = {Amsterdam, The Netherlands},
KEYWORDS = {neural networks, recurrent neural networks},
PUBLISHER = {Elsevier Science Publishers B. V.},
PDF = {../../FTPDIR/koiran-dam.pdf},
   ABSTRACT = {This paper provides lower and upper bounds for the VC
      dimension of recurrent networks. Several types of activation
      functions are discussed, including threshold, polynomial,
      piecewise-polynomial, and sigmoidal functions. The bounds depend
      on two independent parameters: the number w of weights in the
      network and the length k of the input sequence. Ignoring
      multiplicative constants, the main results say roughly the
      following: 1. For architectures whose activation is any fixed
      nonlinear polynomial, the VC dimension is proportional to wk.
      2. For architectures whose activation is any fixed piecewise
      polynomial, the VC dimension is between wk and w^2 k. 3. For
      architectures with threshold activations, the VC dimension is
      between w log(k/w) and the smaller of wk log(wk) and
      w^2 + w log(wk). 4. For the standard sigmoid tanh(x), the VC
      dimension is between wk and w^4 k^2.},
   DOI = {10.1016/S0166-218X(98)00014-6}
}