Arbitrary Nonlinearity Is Sufficient to Represent All Functions by Neural Networks: A Theorem. Kreinovich, V. Y. 4(3):381–383. Paper doi abstract bibtex It is proved that if we have neurons implementing arbitrary linear functions and a neuron implementing one (arbitrary but smooth) nonlinear function g(x), then for every continuous function f(x1, …, xm) of arbitrarily many variables and for arbitrary e > 0 we can construct a network that consists of g-neurons and linear neurons and computes f with precision e.

@article{kreinovichArbitraryNonlinearitySufficient1991,
title = {Arbitrary Nonlinearity Is Sufficient to Represent All Functions by Neural Networks: A Theorem},
author = {Kreinovich, Vladik Y.},
date = {1991-01},
journaltitle = {Neural Networks},
volume = {4},
pages = {381--383},
issn = {0893-6080},
doi = {10.1016/0893-6080(91)90074-f},
url = {https://doi.org/10.1016/0893-6080(91)90074-f},
abstract = {It is proved that if we have neurons implementing arbitrary linear functions and a neuron implementing one (arbitrary but smooth) nonlinear function g(x), then for every continuous function f(x1, …, xm) of arbitrarily many variables and for arbitrary e {$>$} 0 we can construct a network that consists of g-neurons and linear neurons and computes f with precision e.},
keywords = {*imported-from-citeulike-INRMM,~INRMM-MiD:c-10833268,artificial-neural-networks,back-propagation-networks,mapping-networks,network-representation-capability,nonideal-neurons,universal-approximation},
number = {3}
}

Downloads: 0

{"_id":"4jyz88fG4CX9YDLbW","bibbaseid":"kreinovich-arbitrarynonlinearityissufficienttorepresentallfunctionsbyneuralnetworksatheorem","authorIDs":[],"author_short":["Kreinovich, V. Y."],"bibdata":{"bibtype":"article","type":"article","title":"Arbitrary Nonlinearity Is Sufficient to Represent All Functions by Neural Networks: A Theorem","author":[{"propositions":[],"lastnames":["Kreinovich"],"firstnames":["Vladik","Y."],"suffixes":[]}],"date":"1991-01","journaltitle":"Neural Networks","volume":"4","pages":"381–383","issn":"0893-6080","doi":"10.1016/0893-6080(91)90074-f","url":"https://doi.org/10.1016/0893-6080(91)90074-f","abstract":"It is proved that if we have neurons implementing arbitrary linear functions and a neuron implementing one (arbitrary but smooth) nonlinear function g(x), then for every continuous function f(x1, …, xm) of arbitrarily many variables and for arbitrary e $>$ 0 we can construct a network that consists of g-neurons and linear neurons and computes f with precision e.","keywords":"*imported-from-citeulike-INRMM,~INRMM-MiD:c-10833268,artificial-neural-networks,back-propagation-networks,mapping-networks,network-representation-capability,nonideal-neurons,universal-approximation","number":"3","bibtex":"@article{kreinovichArbitraryNonlinearitySufficient1991,\n title = {Arbitrary Nonlinearity Is Sufficient to Represent All Functions by Neural Networks: A Theorem},\n author = {Kreinovich, Vladik Y.},\n date = {1991-01},\n journaltitle = {Neural Networks},\n volume = {4},\n pages = {381--383},\n issn = {0893-6080},\n doi = {10.1016/0893-6080(91)90074-f},\n url = {https://doi.org/10.1016/0893-6080(91)90074-f},\n abstract = {It is proved that if we have neurons implementing arbitrary linear functions and a neuron implementing one (arbitrary but smooth) nonlinear function g(x), then for every continuous function f(x1, …, xm) of arbitrarily many variables and for arbitrary e {$>$} 0 we can construct a network that consists of g-neurons and linear neurons and computes f with precision e.},\n keywords = {*imported-from-citeulike-INRMM,~INRMM-MiD:c-10833268,artificial-neural-networks,back-propagation-networks,mapping-networks,network-representation-capability,nonideal-neurons,universal-approximation},\n number = {3}\n}\n\n","author_short":["Kreinovich, V. Y."],"key":"kreinovichArbitraryNonlinearitySufficient1991","id":"kreinovichArbitraryNonlinearitySufficient1991","bibbaseid":"kreinovich-arbitrarynonlinearityissufficienttorepresentallfunctionsbyneuralnetworksatheorem","role":"author","urls":{"Paper":"https://doi.org/10.1016/0893-6080(91)90074-f"},"keyword":["*imported-from-citeulike-INRMM","~INRMM-MiD:c-10833268","artificial-neural-networks","back-propagation-networks","mapping-networks","network-representation-capability","nonideal-neurons","universal-approximation"],"downloads":0},"bibtype":"article","biburl":"https://tmpfiles.org/dl/58794/INRMM.bib","creationDate":"2020-07-02T22:41:11.611Z","downloads":0,"keywords":["*imported-from-citeulike-inrmm","~inrmm-mid:c-10833268","artificial-neural-networks","back-propagation-networks","mapping-networks","network-representation-capability","nonideal-neurons","universal-approximation"],"search_terms":["arbitrary","nonlinearity","sufficient","represent","functions","neural","networks","theorem","kreinovich"],"title":"Arbitrary Nonlinearity Is Sufficient to Represent All Functions by Neural Networks: A Theorem","year":null,"dataSources":["DXuKbcZTirdigFKPF"]}