Reducing the Dimensionality of Data with Neural Networks. Hinton, G. E. & Salakhutdinov, R. R. Science, 313(5786):504–507, July, 2006. rate: 5
Abstract: High-dimensional data can be converted to low-dimensional codes by training a multilayer neural network with a small central layer to reconstruct high-dimensional input vectors. Gradient descent can be used for fine-tuning the weights in such “autoencoder” networks, but this works well only if the initial weights are close to a good solution. We describe an effective way of initializing the weights that allows deep autoencoder networks to learn low-dimensional codes that work much better than principal components analysis as a tool to reduce the dimensionality of data.
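To make the abstract concrete, here is a minimal sketch (not from the paper) of a deep autoencoder with a small central code layer, fine-tuned by gradient descent on reconstruction error. It assumes PyTorch, illustrative layer widths (784-1000-500-250-30, echoing the paper's MNIST setup), plain SGD, and random stand-in data; the paper's key step, initializing the weights by layer-wise RBM pretraining, is sketched separately after the BibTeX entry below.

# Minimal sketch (assumptions: PyTorch, illustrative sizes, random stand-in data).
# Shows only the fine-tuning stage: a deep autoencoder trained by gradient
# descent to reconstruct its input through a small central code layer.
import torch
import torch.nn as nn

class DeepAutoencoder(nn.Module):
    def __init__(self, sizes=(784, 1000, 500, 250, 30)):
        super().__init__()
        enc, dec = [], []
        # Encoder: progressively compress down to the small central code layer.
        for d_in, d_out in zip(sizes[:-1], sizes[1:]):
            enc += [nn.Linear(d_in, d_out), nn.Sigmoid()]
        # Decoder: mirror of the encoder, reconstructing the input vector.
        rev = sizes[::-1]
        for d_in, d_out in zip(rev[:-1], rev[1:]):
            dec += [nn.Linear(d_in, d_out), nn.Sigmoid()]
        self.encoder = nn.Sequential(*enc)
        self.decoder = nn.Sequential(*dec)

    def forward(self, x):
        code = self.encoder(x)            # low-dimensional code
        return self.decoder(code), code

model = DeepAutoencoder()
opt = torch.optim.SGD(model.parameters(), lr=0.1)
loss_fn = nn.MSELoss()

x = torch.rand(64, 784)                   # stand-in for a batch of input vectors
for step in range(10):                    # toy-length fine-tuning loop
    recon, _ = model(x)
    loss = loss_fn(recon, x)              # reconstruction error
    opt.zero_grad()
    loss.backward()
    opt.step()

As the abstract notes, this fine-tuning works well only if the initial weights are already close to a good solution; with a deep stack and random initialization, gradient descent on reconstruction error tends to converge poorly, which is the problem the paper's pretraining-based initialization addresses.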
@article{hinton_reducing_2006,
title = {Reducing the {Dimensionality} of {Data} with {Neural} {Networks}},
volume = {313},
issn = {0036-8075, 1095-9203},
url = {https://www.science.org/doi/10.1126/science.1127647},
doi = {10.1126/science.1127647},
abstract = {High-dimensional data can be converted to low-dimensional codes by training a multilayer neural network with a small central layer to reconstruct high-dimensional input vectors. Gradient descent can be used for fine-tuning the weights in such “autoencoder” networks, but this works well only if the initial weights are close to a good solution. We describe an effective way of initializing the weights that allows deep autoencoder networks to learn low-dimensional codes that work much better than principal components analysis as a tool to reduce the dimensionality of data.},
language = {en},
number = {5786},
urldate = {2023-06-16},
journal = {Science},
author = {Hinton, G. E. and Salakhutdinov, R. R.},
month = jul,
year = {2006},
note = {rate: 5},
keywords = {\#Science, /unread, ⭐⭐⭐⭐⭐},
pages = {504--507},
}
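The abstract's "effective way of initializing the weights" is greedy layer-wise pretraining with restricted Boltzmann machines. Below is a compressed sketch of that idea using one-step contrastive divergence (CD-1) in NumPy; the layer widths, learning rate, epoch count, and random data are illustrative assumptions, and the paper additionally uses momentum, weight decay, linear units for the code layer, and then "unrolls" the pretrained stack into encoder and decoder before backpropagation fine-tuning.

# Minimal sketch (assumptions: NumPy, illustrative sizes and hyperparameters).
# Greedy layer-wise pretraining: each layer is first trained as an RBM with
# one-step contrastive divergence (CD-1); the learned weights then initialize
# the corresponding autoencoder layer before gradient-descent fine-tuning.
import numpy as np

rng = np.random.default_rng(0)

def sigmoid(x):
    return 1.0 / (1.0 + np.exp(-x))

def train_rbm(data, n_hidden, epochs=5, lr=0.1):
    """Train one RBM layer with CD-1; return weights, hidden biases, activations."""
    n_visible = data.shape[1]
    W = rng.normal(0.0, 0.01, size=(n_visible, n_hidden))
    b_h = np.zeros(n_hidden)
    b_v = np.zeros(n_visible)
    for _ in range(epochs):
        v0 = data
        # Positive phase: hidden probabilities given the data.
        h0 = sigmoid(v0 @ W + b_h)
        h0_sample = (rng.random(h0.shape) < h0).astype(float)
        # Negative phase: one reconstruction step (CD-1).
        v1 = sigmoid(h0_sample @ W.T + b_v)
        h1 = sigmoid(v1 @ W + b_h)
        # Contrastive-divergence updates.
        W += lr * (v0.T @ h0 - v1.T @ h1) / len(data)
        b_h += lr * (h0 - h1).mean(axis=0)
        b_v += lr * (v0 - v1).mean(axis=0)
    return W, b_h, sigmoid(data @ W + b_h)

# Greedy stack: each RBM's hidden activations become the next RBM's data.
x = rng.random((256, 784))           # stand-in for input vectors (e.g. pixels)
layer_sizes = [1000, 500, 250, 30]   # illustrative encoder widths
activations, pretrained = x, []
for n_hidden in layer_sizes:
    W, b_h, activations = train_rbm(activations, n_hidden)
    pretrained.append((W, b_h))      # used to initialize the deep autoencoder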
{"_id":{"_str":"534259580e946d920a000a06"},"__v":1,"authorIDs":["2ACfCTBEv4pRPLwBb","4DQrsTmafKuPbvKom","5457dd852abc8e9f3700082c","5de76c4f179cbdde01000135","5de7b92fbc280fdf01000192","5de7e861c8f9f6df01000188","5de7ff309b61e8de0100005f","5de917d35d589edf01000025","5de93bf8b8c3f8de010000a3","5de95819d574c6de010000d5","5de96615d574c6de010001ab","5de9faf7fac96fde01000039","5dea1112fac96fde01000194","5deb75f49e04d1df010000c8","5deb8542b62591df0100002d","5deb946fb62591df010000ef","5decb37d93ac84df01000108","5dece9a3619535de010000f9","5dee20da584fb4df0100023f","5dee5ebb773914de01000077","5dee6b12773914de0100015a","5deea5af0ceb4cdf01000193","5deee4cc66e59ade01000133","5def23c6e83f7dde0100003c","5def2e39e83f7dde010000a6","5def601cfe2024de01000084","5defdd35090769df01000181","5df0938cf651f5df01000056","5df0980df651f5df010000a2","5df0c74096fa76de01000024","5df0eda045b054df010000fb","5df2008fe4cb4ede01000035","5df2583563aac8df010000ad","5df25ae963aac8df010000dd","5df28978cf8320de0100001f","5df3756223fb6fdf010000fe","5df38d112b1f8ade01000086","5df3f9cad1756cdf01000039","5df4ca0755b997de0100009a","5df4cd8055b997de010000c2","5df53e56fd245cde01000125","5df60b78a37a40df01000156","5df62fce38e915de0100004b","5df6491ddf30fcdf0100003d","5df67503797ba9de01000104","5df6983872bbd4df01000160","5df6b0e031a37ade01000178","5df789d35c8a36df010000f7","5df7c23392a8e4df010000da","5df7dafbdc100cde010000e1","5df7e65edc100cde010001c6","5df89d4010b1d1de01000088","5df8b0cee6b510df01000021","5df93745d04b27df01000185","5df9d77138a7afde01000084","5dfa483ced5baede0100011b","5dfa67a37d1403df01000123","5dfbc3f34705b7de01000022","5dfcc5cc7a3608de0100004f","5dfe49bfbfbabdde01000004","5e1dc9478d71ddde0100015d","5e29d9d0888177df0100011e","5e48c117f1ed39de0100008d","5e555c0ee89e5fde010000e6","5e55fa1c819fabdf0100003a","5e5b04db6e568ade0100001f","5hGMdsfN7BrXW6K8T","5vmPz2jJcYQdtZPiZ","6yoSqPPyPrLdz8e5Q","BYkXaBeGZENiggkom","Bm98SYMoSNDbYwKGj","EsmZfHTQHAoi4zrJ2","N6cuxqTfG9ybhWDqZ","PXRdnhZs2CXY9NLhX","Q7zrKooGeSy8NTBjC","QxWxCp32GcmNqJ9K2","WnMtdN4pbnNcAtJ9C","e3ZEg6YfZmhHyjxdZ","exw99o2vqr9d3BXtB","fnGMsMDrpkcjCLZ5X","gN5Lfqjgx8P4c7HJT","gxtJ9RRRnpW2hQdtv","hCHC3WLvySqxwH4eZ","jN4BRAzEpDg6bmHmM","mBpuinLcpSzpxcFaz","n3Tju5NZ6trek5XEM","n3hXojCsQTaqGTPyY","ovEhxZqGLG9hGfrun","rnZ6cT67qkowNdLgz","u6Fai3nvyHwLKZpPn","vcz5Swk9goZXRki2G","x9kDqsoXq57J2bEu5","xmZk6XEacSsFbo2Sy","xufS6EqKGDqRQs47H"],"author_short":["Hinton, G. E.","Salakhutdinov, R. R."],"bibbaseid":"hinton-salakhutdinov-reducingthedimensionalityofdatawithneuralnetworks-2006","bibdata":{"bibtype":"article","type":"article","title":"Reducing the Dimensionality of Data with Neural Networks","volume":"313","issn":"0036-8075, 1095-9203","url":"https://www.science.org/doi/10.1126/science.1127647","doi":"10.1126/science.1127647","abstract":"High-dimensional data can be converted to low-dimensional codes by training a multilayer neural network with a small central layer to reconstruct high-dimensional input vectors. Gradient descent can be used for fine-tuning the weights in such “autoencoder” networks, but this works well only if the initial weights are close to a good solution. 
We describe an effective way of initializing the weights that allows deep autoencoder networks to learn low-dimensional codes that work much better than principal components analysis as a tool to reduce the dimensionality of data.","language":"en","number":"5786","urldate":"2023-06-16","journal":"Science","author":[{"propositions":[],"lastnames":["Hinton"],"firstnames":["G.","E."],"suffixes":[]},{"propositions":[],"lastnames":["Salakhutdinov"],"firstnames":["R.","R."],"suffixes":[]}],"month":"July","year":"2006","note":"rate: 5","keywords":"#Science, /unread, ⭐⭐⭐⭐⭐","pages":"504–507","bibtex":"@article{hinton_reducing_2006,\n\ttitle = {Reducing the {Dimensionality} of {Data} with {Neural} {Networks}},\n\tvolume = {313},\n\tissn = {0036-8075, 1095-9203},\n\turl = {https://www.science.org/doi/10.1126/science.1127647},\n\tdoi = {10.1126/science.1127647},\n\tabstract = {High-dimensional data can be converted to low-dimensional codes by training a multilayer neural network with a small central layer to reconstruct high-dimensional input vectors. Gradient descent can be used for fine-tuning the weights in such “autoencoder” networks, but this works well only if the initial weights are close to a good solution. We describe an effective way of initializing the weights that allows deep autoencoder networks to learn low-dimensional codes that work much better than principal components analysis as a tool to reduce the dimensionality of data.},\n\tlanguage = {en},\n\tnumber = {5786},\n\turldate = {2023-06-16},\n\tjournal = {Science},\n\tauthor = {Hinton, G. E. and Salakhutdinov, R. R.},\n\tmonth = jul,\n\tyear = {2006},\n\tnote = {rate: 5},\n\tkeywords = {\\#Science, /unread, ⭐⭐⭐⭐⭐},\n\tpages = {504--507},\n}\n\n\n\n","author_short":["Hinton, G. E.","Salakhutdinov, R. R."],"key":"hinton_reducing_2006","id":"hinton_reducing_2006","bibbaseid":"hinton-salakhutdinov-reducingthedimensionalityofdatawithneuralnetworks-2006","role":"author","urls":{"Paper":"https://www.science.org/doi/10.1126/science.1127647"},"keyword":["#Science","/unread","⭐⭐⭐⭐⭐"],"metadata":{"authorlinks":{"hinton, g":"https://bibbase.org/show?bib=www.cs.toronto.edu/~fritz/master3.bib&theme=side"}},"downloads":14,"html":""},"bibtype":"article","biburl":"https://bibbase.org/zotero/zzhenry2012","downloads":14,"keywords":["#science","/unread","⭐⭐⭐⭐⭐"],"search_terms":["reducing","dimensionality","data","neural","networks","hinton","salakhutdinov"],"title":"Reducing the Dimensionality of Data with Neural Networks","year":2006,"dataSources":["avdRdTCKoXoyxo2tQ","GtChgCdrAm62yoP3L","nZHrFJKyxKKDaWYM8"]}