An optimization methodology for neural network weights and architectures. Ludermir, T. B., Yamazaki, A., & Zanchettin, C. IEEE Transactions on Neural Networks, 17(6), 2006. doi:10.1109/TNN.2006.881047

This paper introduces a methodology for neural network global optimization. The aim is the simultaneous optimization of multilayer perceptron (MLP) network weights and architectures, in order to generate topologies with few connections and high classification performance for any data set. The approach combines the advantages of simulated annealing, tabu search, and the backpropagation training algorithm to produce an automatic process that generates networks with high classification performance and low complexity. Experimental results obtained with four classification problems and one prediction problem are better than those obtained by the most commonly used optimization techniques. © 2006 IEEE.
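As a rough illustration of the approach the abstract describes, below is a minimal sketch in Python/NumPy of a simulated-annealing search with a tabu list over both the weights and a binary connectivity mask of a one-hidden-layer MLP. The cost weighting, cooling schedule, perturbation scheme, and all parameter values are illustrative assumptions, not the authors' settings; the paper's final backpropagation fine-tuning step is only noted in a comment.

# A minimal sketch of the paper's idea -- simulated annealing plus a tabu
# list searching jointly over MLP weights and a binary connectivity mask.
# All names, the cost weighting, and the cooling schedule below are
# illustrative assumptions, not the authors' exact method or settings.
import numpy as np

rng = np.random.default_rng(0)

def forward(X, W1, W2, mask1, mask2):
    """One-hidden-layer MLP; masks zero out pruned connections."""
    h = np.tanh(X @ (W1 * mask1))
    return h @ (W2 * mask2)

def cost(X, y, W1, W2, mask1, mask2, alpha=0.5):
    """Weighted sum of classification error and connection density."""
    pred = np.argmax(forward(X, W1, W2, mask1, mask2), axis=1)
    err = np.mean(pred != y)
    density = (mask1.sum() + mask2.sum()) / (mask1.size + mask2.size)
    return alpha * err + (1 - alpha) * density

def neighbour(W1, W2, mask1, mask2, sigma=0.1, flip_p=0.02):
    """Perturb weights with Gaussian noise; flip a few mask bits."""
    W1n = W1 + sigma * rng.standard_normal(W1.shape)
    W2n = W2 + sigma * rng.standard_normal(W2.shape)
    m1n = np.where(rng.random(mask1.shape) < flip_p, 1 - mask1, mask1)
    m2n = np.where(rng.random(mask2.shape) < flip_p, 1 - mask2, mask2)
    return W1n, W2n, m1n, m2n

def sa_tabu_search(X, y, n_hidden=8, iters=2000, T0=1.0, cooling=0.999,
                   tabu_len=20):
    n_in, n_out = X.shape[1], int(y.max()) + 1
    W1 = 0.5 * rng.standard_normal((n_in, n_hidden))
    W2 = 0.5 * rng.standard_normal((n_hidden, n_out))
    m1, m2 = np.ones_like(W1), np.ones_like(W2)
    best = (W1, W2, m1, m2)
    best_c = cur_c = cost(X, y, *best)
    tabu = []          # signatures of recently visited architectures
    T = T0
    for _ in range(iters):
        cand = neighbour(W1, W2, m1, m2)
        sig = (cand[2].tobytes(), cand[3].tobytes())
        if sig in tabu:            # tabu move: skip recent architectures
            continue
        c = cost(X, y, *cand)
        # Metropolis acceptance: always take improvements, sometimes worse.
        if c < cur_c or rng.random() < np.exp((cur_c - c) / max(T, 1e-9)):
            W1, W2, m1, m2 = cand
            cur_c = c
            tabu.append(sig)
            tabu = tabu[-tabu_len:]
            if c < best_c:
                best, best_c = cand, c
        T *= cooling
    return best, best_c   # the paper then fine-tunes `best` with backprop

The tabu list here stores signatures of recently visited connectivity masks so the annealing does not revisit the same architectures, which is the role tabu search plays in the hybrid; the best network returned would then be fine-tuned with standard backpropagation, as the paper describes.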
@article{Ludermir2006,
title = {An optimization methodology for neural network weights and architectures},
year = {2006},
keywords = {Multilayer perceptron (MLP),Optimization of weights and architectures,Simulated annealing,Tabu search},
volume = {17},
abstract = {This paper introduces a methodology for neural network global optimization. The aim is the simultaneous optimization of multilayer perceptron (MLP) network weights and architectures, in order to generate topologies with few connections and high classification performance for any data set. The approach combines the advantages of simulated annealing, tabu search, and the backpropagation training algorithm to produce an automatic process that generates networks with high classification performance and low complexity. Experimental results obtained with four classification problems and one prediction problem are better than those obtained by the most commonly used optimization techniques. © 2006 IEEE.},
author = {Ludermir, T.B. and Yamazaki, A. and Zanchettin, C.},
doi = {10.1109/TNN.2006.881047},
journal = {IEEE Transactions on Neural Networks},
number = {6}
}
{"_id":"pSbZCSfspy5S7PsFh","bibbaseid":"ludermir-yamazaki-zanchettin-anoptimizationmethodologyforneuralnetworkweightsandarchitectures-2006","authorIDs":["95PhW7tkuv95vtHAq","PtDsdiZ3iPSFZKH6J"],"author_short":["Ludermir, T.","Yamazaki, A.","Zanchettin, C."],"bibdata":{"title":"An optimization methodology for neural network weights and architectures","type":"article","year":"2006","keywords":"Multilayer perceptron (MLP),Optimization of weights and architectures,Simulating annealing,Tabu search","volume":"17","id":"3d673406-d70d-3e1d-ac59-d3023aa026ff","created":"2019-02-14T18:02:01.197Z","file_attached":false,"profile_id":"74e7d4ea-3dac-3118-aab9-511a5b337e8f","last_modified":"2019-02-14T18:02:01.197Z","read":false,"starred":false,"authored":"true","confirmed":false,"hidden":false,"private_publication":false,"abstract":"This paper introduces a methodology for neural network global optimization. The aim is the simultaneous optimization of multilayer perceptron (MLP) network weights and architectures, in order to generate topologies with few connections and high classification performance for any data sets. The approach combines the advantages of simulated annealing, tabu search and the backpropagation training algorithm in order to generate an automatic process for producing networks with high classification performance and low complexity. Experimental results obtained with four classification problems and one prediction problem has shown to be better than those obtained by the most commonly used optimization techniques. © 2006 IEEE.","bibtype":"article","author":"Ludermir, T.B. and Yamazaki, A. and Zanchettin, C.","doi":"10.1109/TNN.2006.881047","journal":"IEEE Transactions on Neural Networks","number":"6","bibtex":"@article{\n title = {An optimization methodology for neural network weights and architectures},\n type = {article},\n year = {2006},\n keywords = {Multilayer perceptron (MLP),Optimization of weights and architectures,Simulating annealing,Tabu search},\n volume = {17},\n id = {3d673406-d70d-3e1d-ac59-d3023aa026ff},\n created = {2019-02-14T18:02:01.197Z},\n file_attached = {false},\n profile_id = {74e7d4ea-3dac-3118-aab9-511a5b337e8f},\n last_modified = {2019-02-14T18:02:01.197Z},\n read = {false},\n starred = {false},\n authored = {true},\n confirmed = {false},\n hidden = {false},\n private_publication = {false},\n abstract = {This paper introduces a methodology for neural network global optimization. The aim is the simultaneous optimization of multilayer perceptron (MLP) network weights and architectures, in order to generate topologies with few connections and high classification performance for any data sets. The approach combines the advantages of simulated annealing, tabu search and the backpropagation training algorithm in order to generate an automatic process for producing networks with high classification performance and low complexity. Experimental results obtained with four classification problems and one prediction problem has shown to be better than those obtained by the most commonly used optimization techniques. © 2006 IEEE.},\n bibtype = {article},\n author = {Ludermir, T.B. and Yamazaki, A. 
and Zanchettin, C.},\n doi = {10.1109/TNN.2006.881047},\n journal = {IEEE Transactions on Neural Networks},\n number = {6}\n}","author_short":["Ludermir, T.","Yamazaki, A.","Zanchettin, C."],"biburl":"https://bibbase.org/service/mendeley/74e7d4ea-3dac-3118-aab9-511a5b337e8f","bibbaseid":"ludermir-yamazaki-zanchettin-anoptimizationmethodologyforneuralnetworkweightsandarchitectures-2006","role":"author","urls":{},"keyword":["Multilayer perceptron (MLP)","Optimization of weights and architectures","Simulating annealing","Tabu search"],"metadata":{"authorlinks":{"zanchettin, c":"https://bibbase.org/service/mendeley/74e7d4ea-3dac-3118-aab9-511a5b337e8f","zanchettin, c":"https://zanche.github.io/publications/"}},"downloads":0},"bibtype":"article","creationDate":"2020-09-17T14:33:46.201Z","downloads":0,"keywords":["multilayer perceptron (mlp)","optimization of weights and architectures","simulating annealing","tabu search"],"search_terms":["optimization","methodology","neural","network","weights","architectures","ludermir","yamazaki","zanchettin"],"title":"An optimization methodology for neural network weights and architectures","year":2006,"biburl":"https://bibbase.org/service/mendeley/74e7d4ea-3dac-3118-aab9-511a5b337e8f","dataSources":["fvRdkx56Jpp5ebtSw","XkGKCoQgZDKqXZqdh","ya2CyA73rpZseyrZ8","2252seNhipfTmjEBQ"]}