Meta Networks. Munkhdalai, T. & Yu, H. In ICML, volume 70, pages 2554–2563, Sydney, Australia, August 2017.
Neural networks have been successfully applied in applications with a large amount of labeled data. However, the task of rapid generalization on new concepts with small training data while preserving performance on previously learned ones still presents a significant challenge to neural network models. In this work, we introduce a novel meta-learning method, Meta Networks (MetaNet), that learns meta-level knowledge across tasks and shifts its inductive biases via fast parameterization for rapid generalization. When evaluated on the Omniglot and Mini-ImageNet benchmarks, our MetaNet models achieve near human-level performance and outperform the baseline approaches by up to 6% accuracy. We demonstrate several appealing properties of MetaNet relating to generalization and continual learning.
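The abstract's central mechanism, shifting inductive biases via "fast parameterization", can be illustrated with a short sketch: a meta-learner maps the loss gradient on a support example to task-specific fast weights, which are combined with the slowly learned weights at prediction time. The following is a minimal NumPy sketch under stated assumptions, not the paper's implementation; all names (slow_W, meta_M, fast_W) are hypothetical, and the additive slow-plus-fast combination stands in for the paper's layer-augmentation scheme and external memory, which are omitted here.

import numpy as np

rng = np.random.default_rng(0)

# Slow weights: learned across tasks by ordinary gradient descent.
slow_W = rng.normal(scale=0.1, size=(8, 4))

# Hypothetical meta-learner: a linear map from a flattened loss
# gradient to flattened fast weights (the paper uses a neural net).
meta_M = rng.normal(scale=0.1, size=(32, 32))

def forward(W, x):
    return np.tanh(W @ x)

def support_grad(W, x, y):
    # Gradient of a squared loss on one support example w.r.t. W.
    h = forward(W, x)
    dh = (h - y) * (1.0 - h ** 2)  # backprop through tanh
    return np.outer(dh, x)

# Rapid parameterization for a new task: a single support example
# yields fast weights directly, with no inner-loop SGD on slow_W.
x_s, y_s = rng.normal(size=4), rng.normal(size=8)
g = support_grad(slow_W, x_s, y_s)
fast_W = (meta_M @ g.ravel()).reshape(slow_W.shape)

# Query-time prediction combines slow and fast weights (additively
# here for brevity; MetaNet combines them via layer augmentation).
x_q = rng.normal(size=4)
print(forward(slow_W + fast_W, x_q))
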
@inproceedings{munkhdalai_meta_2017,
	address = {Sydney, Australia},
	title = {Meta {Networks}},
	volume = {70},
	abstract = {Neural networks have been successfully applied in applications with a large amount of labeled data. However, the task of rapid generalization on new concepts with small training data while preserving performances on previously learned ones still presents a significant challenge to neural network models. In this work, we introduce a novel meta learning method, Meta Networks (MetaNet), that learns a meta-level knowledge across tasks and shifts its inductive biases via fast parameterization for rapid generalization. When evaluated on Omniglot and Mini-ImageNet benchmarks, our MetaNet models achieve a near human-level performance and outperform the baseline approaches by up to 6\% accuracy. We demonstrate several appealing properties of MetaNet relating to generalization and continual learning.},
	booktitle = {{ICML}},
	author = {Munkhdalai, Tsendsuren and Yu, Hong},
	month = aug,
	year = {2017},
	pmid = {31106300},
	pmcid = {PMC6519722},
	pages = {2554--2563},
}