Stochastic Deep Networks. de Bie, G., Peyré, G., & Cuturi, M.
Stochastic Deep Networks — [Paper link] · abstract · bibtex
Machine learning is increasingly targeting areas where input data cannot be accurately described by a single vector, but can be modeled instead using the more flexible concept of random vectors, namely probability measures or more simply point clouds of varying cardinality. Using deep architectures on measures poses, however, many challenging issues. Indeed, deep architectures are originally designed to handle fixed-length vectors, or, using recursive mechanisms, ordered sequences thereof. In sharp contrast, measures describe a varying number of weighted observations with no particular order. We propose in this work a deep framework designed to handle crucial aspects of measures, namely permutation invariances, variations in weights and cardinality. Architectures derived from this pipeline can (i) map measures to measures - using the concept of push-forward operators; (ii) bridge the gap between measures and Euclidean spaces - through integration steps. This allows to design discriminative networks (to classify or reduce the dimensionality of input measures), generative architectures (to synthesize measures) and recurrent pipelines (to predict measure dynamics). We provide a theoretical analysis of these building blocks, review our architectures' approximation abilities and robustness w.r.t. perturbation, and try them on various discriminative and generative tasks.
@article{debieStochasticDeepNetworks2018,
  title       = {Stochastic {Deep Networks}},
  author      = {family=Bie, given=Gwendoline, prefix=de, useprefix=true and Peyré, Gabriel and Cuturi, Marco},
  date        = {2018-11-18},
  eprint      = {1811.07429},
  eprinttype  = {arXiv},
  eprintclass = {cs, stat},
  url         = {http://arxiv.org/abs/1811.07429},
  urldate     = {2019-04-25},
  abstract    = {Machine learning is increasingly targeting areas where input data cannot be accurately described by a single vector, but can be modeled instead using the more flexible concept of random vectors, namely probability measures or more simply point clouds of varying cardinality. Using deep architectures on measures poses, however, many challenging issues. Indeed, deep architectures are originally designed to handle fixed-length vectors, or, using recursive mechanisms, ordered sequences thereof. In sharp contrast, measures describe a varying number of weighted observations with no particular order. We propose in this work a deep framework designed to handle crucial aspects of measures, namely permutation invariances, variations in weights and cardinality. Architectures derived from this pipeline can (i) map measures to measures - using the concept of push-forward operators; (ii) bridge the gap between measures and Euclidean spaces - through integration steps. This allows to design discriminative networks (to classify or reduce the dimensionality of input measures), generative architectures (to synthesize measures) and recurrent pipelines (to predict measure dynamics). We provide a theoretical analysis of these building blocks, review our architectures' approximation abilities and robustness w.r.t. perturbation, and try them on various discriminative and generative tasks.},
  keywords    = {Statistics - Machine Learning,Computer Science - Machine Learning},
  file        = {/home/dimitri/Nextcloud/Zotero/storage/6P7NQ9RK/de Bie et al. - 2018 - Stochastic Deep Networks.pdf;/home/dimitri/Nextcloud/Zotero/storage/PIGZRQRU/1811.html}
}

Downloads: 0