On the complexity of training neural networks with continuous activation functions. DasGupta, B., Siegelmann, H., & Sontag, E. IEEE Trans. Neural Networks, 6:1490–1504, 1995. abstract bibtex Blum and Rivest showed that any possible neural net learning algorithm based on fixed architectures faces severe computational barriers. This paper extends their NP-completeness result, which applied only to nets based on hard threshold activations, to nets that employ a particular continuous activation. In view of neural network practice, this is a more relevant result to understanding the limitations of backpropagation and related techniques.
@ARTICLE{DasGupta-TNN,
AUTHOR = {B. DasGupta and H.T. Siegelmann and E.D. Sontag},
JOURNAL = {IEEE Trans. Neural Networks},
TITLE = {On the complexity of training neural networks with
continuous activation functions},
YEAR = {1995},
OPTMONTH = {},
OPTNOTE = {},
OPTNUMBER = {},
PAGES = {1490--1504},
VOLUME = {6},
KEYWORDS = {neural networks, analog computing, theory of computing,
computational complexity, machine learning},
PDF = {../../FTPDIR/complexity-training.pdf},
ABSTRACT = { Blum and Rivest showed that any possible neural net
learning algorithm based on fixed architectures faces severe
computational barriers. This paper extends their NP-completeness
result, which applied only to nets based on hard threshold
activations, to nets that employ a particular continuous activation.
In view of neural network practice, this is a more relevant result to
understanding the limitations of backpropagation and related
techniques. }
}
Downloads: 0
{"_id":"ectPv88sAWdxBc32g","bibbaseid":"dasgupta-siegelmann-sontag-onthecomplexityoftrainingneuralnetworkswithcontinuousactivationfunctions-1995","downloads":0,"creationDate":"2018-10-18T05:07:06.470Z","title":"On the complexity of training neural networks with continuous activation functions","author_short":["DasGupta, B.","Siegelmann, H.","Sontag, E."],"year":1995,"bibtype":"article","biburl":"http://www.sontaglab.org/PUBDIR/Biblio/complete-bibliography.bib","bibdata":{"bibtype":"article","type":"article","author":[{"firstnames":["B."],"propositions":[],"lastnames":["DasGupta"],"suffixes":[]},{"firstnames":["H.T."],"propositions":[],"lastnames":["Siegelmann"],"suffixes":[]},{"firstnames":["E.D."],"propositions":[],"lastnames":["Sontag"],"suffixes":[]}],"journal":"IEEE Trans. Neural Networks","title":"On the complexity of training neural networks with continuous activation functions","year":"1995","optmonth":"","optnote":"","optnumber":"","pages":"1490–1504","volume":"6","keywords":"neural networks, analog computing, theory of computing, neural networks, computational complexity, machine learning","pdf":"../../FTPDIR/complexity-training.pdf","abstract":"Blum and Rivest showed that any possible neural net learning algorithm based on fixed architectures faces severe computational barriers. This paper extends their NP-completeness result, which applied only to nets based on hard threshold activations, to nets that employ a particular continuous activation. In view of neural network practice, this is a more relevant result to understanding the limitations of backpropagation and related techniques. ","bibtex":"@ARTICLE{DasGupta-TNN,\n AUTHOR = {B. DasGupta and H.T. Siegelmann and E.D. Sontag},\n JOURNAL = {IEEE Trans. Neural Networks},\n TITLE = {On the complexity of training neural networks with \n continuous activation functions},\n YEAR = {1995},\n OPTMONTH = {},\n OPTNOTE = {},\n OPTNUMBER = {},\n PAGES = {1490--1504},\n VOLUME = {6},\n KEYWORDS = {neural networks, analog computing, theory of computing, \n neural networks, computational complexity, machine learning},\n PDF = {../../FTPDIR/complexity-training.pdf},\n ABSTRACT = { Blum and Rivest showed that any possible neural net \n learning algorithm based on fixed architectures faces severe \n computational barriers. This paper extends their NP-completeness \n result, which applied only to nets based on hard threshold \n activations, to nets that employ a particular continuous activation. \n In view of neural network practice, this is a more relevant result to \n understanding the limitations of backpropagation and related \n techniques. }\n}\n\n","author_short":["DasGupta, B.","Siegelmann, H.","Sontag, E."],"key":"DasGupta-TNN","id":"DasGupta-TNN","bibbaseid":"dasgupta-siegelmann-sontag-onthecomplexityoftrainingneuralnetworkswithcontinuousactivationfunctions-1995","role":"author","urls":{},"keyword":["neural networks","analog computing","theory of computing","neural networks","computational complexity","machine learning"],"downloads":0,"html":""},"search_terms":["complexity","training","neural","networks","continuous","activation","functions","dasgupta","siegelmann","sontag"],"keywords":["neural networks","analog computing","theory of computing","neural networks","computational complexity","machine learning"],"authorIDs":["5bc814f9db768e100000015a"],"dataSources":["DKqZbTmd7peqE4THw"]}