Neural networks, principal components, and subspaces. Oja, E. International Journal of Neural Systems, 01(01):61–68, January 1989.
Abstract: A single neuron with Hebbian-type learning for the connection weights, and with nonlinear internal feedback, has been shown to extract the statistical principal components of its stationary input pattern sequence. A generalization of this model to a layer of neuron units is given, called the Subspace Network, which yields a multi-dimensional, principal component subspace. This can be used as an associative memory for the input vectors or as a module in nonsupervised learning of data clusters in the input space. It is also able to realize a powerful pattern classifier based on projections on class subspaces. Some classification results for natural textures are given.
@article{oja1989,
title = {Neural networks, principal components, and subspaces},
volume = {01},
issn = {0129-0657},
url = {https://www.worldscientific.com/doi/abs/10.1142/S0129065789000475},
doi = {10.1142/S0129065789000475},
abstract = {A single neuron with Hebbian-type learning for the connection weights, and with nonlinear internal feedback, has been shown to extract the statistical principal components of its stationary input pattern sequence. A generalization of this model to a layer of neuron units is given, called the Subspace Network, which yields a multi-dimensional, principal component subspace. This can be used as an associative memory for the input vectors or as a module in nonsupervised learning of data clusters in the input space. It is also able to realize a powerful pattern classifier based on projections on class subspaces. Some classification results for natural textures are given.},
number = {01},
urldate = {2023-09-21},
journal = {International Journal of Neural Systems},
author = {Oja, Erkki},
month = jan,
year = {1989},
pages = {61--68},
}
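The abstract states the learning rule only informally, so here is a minimal NumPy sketch of the symmetric subspace rule commonly associated with this work, ΔW = η (x − W y) yᵀ with y = Wᵀ x; the exact update and constants in the paper may differ, and the function name, learning rate, and toy data below are illustrative assumptions, not the paper's code.

```python
import numpy as np

def oja_subspace(X, k, lr=2e-3, epochs=100, seed=0):
    """Learn a k-dimensional principal subspace of the rows of X with the
    subspace rule  W += lr * (x - W @ y) @ y.T,  where  y = W.T @ x.
    The columns of W converge to an orthonormal basis of the top-k PCA
    subspace (not the individual, ordered principal components).
    NOTE: a sketch of the rule as commonly stated, not the paper's code."""
    rng = np.random.default_rng(seed)
    W = 0.1 * rng.standard_normal((X.shape[1], k))  # small random initial weights
    for _ in range(epochs):
        for x in X:
            y = W.T @ x                       # layer outputs
            W += lr * np.outer(x - W @ y, y)  # Hebbian term minus feedback
    return W

# Toy check: the learned subspace should agree with the top-2 PCA subspace.
rng = np.random.default_rng(1)
X = rng.standard_normal((2000, 5)) @ rng.standard_normal((5, 5))
X -= X.mean(axis=0)                           # the rule assumes zero-mean input
W = oja_subspace(X, k=2)
Q = np.linalg.qr(W)[0]                        # orthonormal basis of learned subspace
V = np.linalg.svd(X, full_matrices=False)[2][:2].T  # top-2 principal directions
print(np.linalg.svd(V.T @ Q)[1])              # cosines of principal angles, ≈ [1., 1.]
```

The subspace classifier mentioned at the end of the abstract reuses the same machinery: learn one weight matrix W_c per class, orthonormalize it, and assign a new input x to the class whose projection norm ‖W_cᵀ x‖ is largest.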