Hybrid training method for MLP: Optimization of architecture and training. Zanchettin, C., Ludermir, T. B., & Almeida, L. M. IEEE Transactions on Systems, Man, and Cybernetics, Part B: Cybernetics, 41(4), 2011. doi: 10.1109/TSMCB.2011.2107035

Abstract: The performance of an artificial neural network (ANN) depends upon the selection of proper connection weights, network architecture, and cost function during network training. This paper presents a hybrid approach (GaTSa) to optimize the performance of the ANN in terms of architecture and weights. GaTSa is an extension of a previous method (TSa) proposed by the authors. GaTSa integrates the heuristics simulated annealing (SA), tabu search (TS), and genetic algorithms (GA) with backpropagation, whereas TSa does not use GA. The main advantages of GaTSa are the following: a constructive process to add new nodes to the architecture based on GA, the ability to escape from local minima with uphill moves (SA feature), and faster convergence by the evaluation of a set of solutions (TS feature). The performance of GaTSa is investigated through an empirical evaluation of 11 public-domain data sets using different cost functions in the simultaneous optimization of the multilayer perceptron ANN architecture and weights. Experiments demonstrated that GaTSa can also be used for relevant feature selection. GaTSa presented statistically significant results in comparison with other global and local optimization techniques. © 2011 IEEE.
@article{zanchettin2011hybrid,
title = {Hybrid training method for {MLP}: Optimization of architecture and training},
year = {2011},
keywords = {Genetic algorithms (GAs), multilayer perceptron (MLP), optimization, simulated annealing, tabu search (TS)},
volume = {41},
abstract = {The performance of an artificial neural network (ANN) depends upon the selection of proper connection weights, network architecture, and cost function during network training. This paper presents a hybrid approach (GaTSa) to optimize the performance of the ANN in terms of architecture and weights. GaTSa is an extension of a previous method (TSa) proposed by the authors. GaTSa integrates the heuristics simulated annealing (SA), tabu search (TS), and genetic algorithms (GA) with backpropagation, whereas TSa does not use GA. The main advantages of GaTSa are the following: a constructive process to add new nodes to the architecture based on GA, the ability to escape from local minima with uphill moves (SA feature), and faster convergence by the evaluation of a set of solutions (TS feature). The performance of GaTSa is investigated through an empirical evaluation of 11 public-domain data sets using different cost functions in the simultaneous optimization of the multilayer perceptron ANN architecture and weights. Experiments demonstrated that GaTSa can also be used for relevant feature selection. GaTSa presented statistically significant results in comparison with other global and local optimization techniques. © 2011 IEEE.},
author = {Zanchettin, C. and Ludermir, T.B. and Almeida, L.M.},
doi = {10.1109/TSMCB.2011.2107035},
journal = {IEEE Transactions on Systems, Man, and Cybernetics, Part B: Cybernetics},
number = {4}
}
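
The abstract describes GaTSa's control flow only at a high level, so a compact illustration may help. The following Python sketch shows how the three heuristics the abstract names can interlock in one search loop: a set of candidate solutions is evaluated at each step (the TS feature), a worse solution can still be accepted with temperature-dependent probability (the SA feature), and a constructive GA-style operator occasionally grows the hidden layer. This is a minimal sketch under assumed details, not the authors' implementation; the operators, hyperparameters, and cooling schedule are illustrative, and the paper's backpropagation fine-tuning stage is omitted.

import numpy as np

rng = np.random.default_rng(0)

def mlp_forward(W1, b1, W2, b2, X):
    # One-hidden-layer MLP: tanh hidden units, linear output.
    return np.tanh(X @ W1 + b1) @ W2 + b2

def cost(sol, X, y):
    # Mean squared error; the paper evaluates several cost functions.
    return float(np.mean((mlp_forward(*sol, X) - y) ** 2))

def random_solution(n_in, n_hidden, n_out, scale=0.5):
    # A candidate solution encodes both architecture (hidden size) and weights.
    return (scale * rng.standard_normal((n_in, n_hidden)),
            scale * rng.standard_normal(n_hidden),
            scale * rng.standard_normal((n_hidden, n_out)),
            scale * rng.standard_normal(n_out))

def perturb(sol, step=0.1):
    # Neighbor move: small Gaussian perturbation of every weight.
    return tuple(w + step * rng.standard_normal(w.shape) for w in sol)

def grow(sol, scale=0.1):
    # Constructive GA-style step (hypothetical operator): add one hidden node.
    W1, b1, W2, b2 = sol
    return (np.hstack([W1, scale * rng.standard_normal((W1.shape[0], 1))]),
            np.append(b1, 0.0),
            np.vstack([W2, scale * rng.standard_normal((1, W2.shape[1]))]),
            b2)

def gatsa_sketch(X, y, n_hidden=2, iters=200, n_neighbors=8, T=1.0, alpha=0.98):
    best = cur = random_solution(X.shape[1], n_hidden, y.shape[1])
    for _ in range(iters):
        # TS feature: evaluate a *set* of candidate solutions, take the best.
        cands = [perturb(cur) for _ in range(n_neighbors)]
        if rng.random() < 0.05:            # occasional constructive step (GA feature)
            cands.append(grow(cur))
        cand = min(cands, key=lambda s: cost(s, X, y))
        delta = cost(cand, X, y) - cost(cur, X, y)
        # SA feature: uphill moves escape local minima with probability exp(-delta/T).
        if delta < 0 or rng.random() < np.exp(-delta / T):
            cur = cand
        if cost(cur, X, y) < cost(best, X, y):
            best = cur
        T *= alpha                         # assumed geometric cooling schedule
    # The paper additionally fine-tunes the final network with backpropagation,
    # which is omitted here for brevity.
    return best

# Toy usage: approximate y = sin(x) on a small sample.
X = rng.uniform(-3.0, 3.0, size=(64, 1))
y = np.sin(X)
sol = gatsa_sketch(X, y)
print("hidden nodes:", sol[0].shape[1], "final MSE:", cost(sol, X, y))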
{"_id":"qj3wNwGuNwaN9oSaP","bibbaseid":"zanchettin-ludermir-almeida-hybridtrainingmethodformlpoptimizationofarchitectureandtraining-2011","authorIDs":["95PhW7tkuv95vtHAq","PtDsdiZ3iPSFZKH6J"],"author_short":["Zanchettin, C.","Ludermir, T.","Almeida, L."],"bibdata":{"title":"Hybrid training method for MLP: Optimization of architecture and training","type":"article","year":"2011","keywords":"Genetic algorithms (GAs),multilayer perceptron (MLP),optimization,simulating annealing,tabu search (TS)","volume":"41","id":"9ed6cd4c-a72f-3d6e-a486-a4124513e4c2","created":"2019-02-14T18:02:01.013Z","file_attached":false,"profile_id":"74e7d4ea-3dac-3118-aab9-511a5b337e8f","last_modified":"2019-02-14T18:02:01.013Z","read":false,"starred":false,"authored":"true","confirmed":false,"hidden":false,"private_publication":false,"abstract":"The performance of an artificial neural network (ANN) depends upon the selection of proper connection weights, network architecture, and cost function during network training. This paper presents a hybrid approach (GaTSa) to optimize the performance of the ANN in terms of architecture and weights. GaTSa is an extension of a previous method (TSa) proposed by the authors. GaTSa is based on the integration of the heuristic simulated annealing (SA), tabu search (TS), genetic algorithms (GA), and backpropagation, whereas TSa does not use GA. The main advantages of GaTSa are the following: a constructive process to add new nodes in the architecture based on GA, the ability to escape from local minima with uphill moves (SA feature), and faster convergence by the evaluation of a set of solutions (TS feature). The performance of GaTSa is investigated through an empirical evaluation of 11 public-domain data sets using different cost functions in the simultaneous optimization of the multilayer perceptron ANN architecture and weights. Experiments demonstrated that GaTSa can also be used for relevant feature selection. GaTSa presented statistically relevant results in comparison with other global and local optimization techniques. © 2011 IEEE.","bibtype":"article","author":"Zanchettin, C. and Ludermir, T.B. and Almeida, L.M.","doi":"10.1109/TSMCB.2011.2107035","journal":"IEEE Transactions on Systems, Man, and Cybernetics, Part B: Cybernetics","number":"4","bibtex":"@article{\n title = {Hybrid training method for MLP: Optimization of architecture and training},\n type = {article},\n year = {2011},\n keywords = {Genetic algorithms (GAs),multilayer perceptron (MLP),optimization,simulating annealing,tabu search (TS)},\n volume = {41},\n id = {9ed6cd4c-a72f-3d6e-a486-a4124513e4c2},\n created = {2019-02-14T18:02:01.013Z},\n file_attached = {false},\n profile_id = {74e7d4ea-3dac-3118-aab9-511a5b337e8f},\n last_modified = {2019-02-14T18:02:01.013Z},\n read = {false},\n starred = {false},\n authored = {true},\n confirmed = {false},\n hidden = {false},\n private_publication = {false},\n abstract = {The performance of an artificial neural network (ANN) depends upon the selection of proper connection weights, network architecture, and cost function during network training. This paper presents a hybrid approach (GaTSa) to optimize the performance of the ANN in terms of architecture and weights. GaTSa is an extension of a previous method (TSa) proposed by the authors. GaTSa is based on the integration of the heuristic simulated annealing (SA), tabu search (TS), genetic algorithms (GA), and backpropagation, whereas TSa does not use GA. 
The main advantages of GaTSa are the following: a constructive process to add new nodes in the architecture based on GA, the ability to escape from local minima with uphill moves (SA feature), and faster convergence by the evaluation of a set of solutions (TS feature). The performance of GaTSa is investigated through an empirical evaluation of 11 public-domain data sets using different cost functions in the simultaneous optimization of the multilayer perceptron ANN architecture and weights. Experiments demonstrated that GaTSa can also be used for relevant feature selection. GaTSa presented statistically relevant results in comparison with other global and local optimization techniques. © 2011 IEEE.},\n bibtype = {article},\n author = {Zanchettin, C. and Ludermir, T.B. and Almeida, L.M.},\n doi = {10.1109/TSMCB.2011.2107035},\n journal = {IEEE Transactions on Systems, Man, and Cybernetics, Part B: Cybernetics},\n number = {4}\n}","author_short":["Zanchettin, C.","Ludermir, T.","Almeida, L."],"biburl":"https://bibbase.org/service/mendeley/74e7d4ea-3dac-3118-aab9-511a5b337e8f","bibbaseid":"zanchettin-ludermir-almeida-hybridtrainingmethodformlpoptimizationofarchitectureandtraining-2011","role":"author","urls":{},"keyword":["Genetic algorithms (GAs)","multilayer perceptron (MLP)","optimization","simulating annealing","tabu search (TS)"],"metadata":{"authorlinks":{"zanchettin, c":"https://bibbase.org/service/mendeley/74e7d4ea-3dac-3118-aab9-511a5b337e8f","zanchettin, c":"https://zanche.github.io/publications/"}},"downloads":0},"bibtype":"article","creationDate":"2020-09-17T14:33:46.198Z","downloads":0,"keywords":["genetic algorithms (gas)","multilayer perceptron (mlp)","optimization","simulating annealing","tabu search (ts)"],"search_terms":["hybrid","training","method","mlp","optimization","architecture","training","zanchettin","ludermir","almeida"],"title":"Hybrid training method for MLP: Optimization of architecture and training","year":2011,"biburl":"https://bibbase.org/service/mendeley/74e7d4ea-3dac-3118-aab9-511a5b337e8f","dataSources":["fvRdkx56Jpp5ebtSw","XkGKCoQgZDKqXZqdh","ya2CyA73rpZseyrZ8","2252seNhipfTmjEBQ"]}