AG-ART: An adaptive approach to evolving ART architectures. Kaylani, A.; Georgiopoulos, M.; Mollaghasemi, M.; and Anagnostopoulos, G. C. Neurocomputing, 72(10–12):2079–2092, June 2009. Lattice Computing and Natural Computing (JCIS 2007) / Neural Networks in Intelligent Systems Design (ISDA 2007).
@Article{Kaylani2009,
author = {Kaylani, Assem and Georgiopoulos, Michael and Mollaghasemi, Mansooreh and Anagnostopoulos, Georgios C.},
title = {AG-ART: An adaptive approach to evolving ART architectures},
journal = {Neurocomputing},
year = {2009},
volume = {72},
number = {10--12},
pages = {2079--2092},
month = {June},
issn = {0925-2312},
note = {Lattice Computing and Natural Computing (JCIS 2007) / Neural Networks in Intelligent Systems Design (ISDA 2007)},
abstract = {{This paper focuses on classification problems, and in particular
on the evolution of ARTMAP architectures using genetic algorithms,
with the objective of improving generalization performance and alleviating
the adaptive resonance theory (ART) category proliferation problem.
In a previous effort, we introduced an evolutionary version of fuzzy
ARTMAP (FAM), referred to as genetic fuzzy ARTMAP (GFAM). In this paper we apply
an improved genetic algorithm to FAM and extend these ideas to two
other ART architectures: ellipsoidal ARTMAP (EAM) and Gaussian ARTMAP
(GAM). One of the major advantages of the proposed improved genetic
algorithm is that it adapts the GA parameters automatically, and
in a way that takes into account the intricacies of the classification
problem under consideration. The resulting genetically engineered
ART architectures are justifiably referred to as AG-FAM, AG-EAM and
AG-GAM or collectively as AG-ART (adaptive genetically engineered
ART). We compare the performance (in terms of accuracy, size, and
computational cost) of the AG-ART architectures with GFAM, and other
ART architectures that have appeared in the literature and attempted
to solve the category proliferation problem. Our results demonstrate
that AG-ART architectures perform better than their other ART
counterparts (semi-supervised ART) and better than GFAM. We also
compare AG-ART's performance to other related results
published in the classification literature, and demonstrate that
AG-ART architectures exhibit competitive generalization performance
and, quite often, produce smaller-size classifiers in solving the
same classification problems. We also show that AG-ART's performance
gains are achieved within a reasonable computational budget.}},
doi = {10.1016/j.neucom.2008.09.016},
keywords = {Machine learning},
owner = {georgio},
timestamp = {2012.04.10},
}