Snorkel: Rapid Training Data Creation with Weak Supervision. Ratner, A., Bach, S. H., Ehrenberg, H., Fries, J., Wu, S., & Ré, C. Proceedings of the VLDB Endowment, 11(3):269-282, 2017.
Labeling training data is increasingly the largest bottleneck in deploying machine learning systems. We present Snorkel, a first-of-its-kind system that enables users to train state-of-the-art models without hand labeling any training data. Instead, users write labeling functions that express arbitrary heuristics, which can have unknown accuracies and correlations. Snorkel denoises their outputs without access to ground truth by incorporating the first end-to-end implementation of our recently proposed machine learning paradigm, data programming. We present a flexible interface layer for writing labeling functions based on our experience over the past year collaborating with companies, agencies, and research labs. In a user study, subject matter experts build models 2.8x faster and increase predictive performance an average 45.5% versus seven hours of hand labeling. We study the modeling tradeoffs in this new setting and propose an optimizer for automating tradeoff decisions that gives up to 1.8x speedup per pipeline execution. In two collaborations, with the U.S. Department of Veterans Affairs and the U.S. Food and Drug Administration, and on four open-source text and image data sets representative of other deployments, Snorkel provides 132% average improvements to predictive performance over prior heuristic approaches and comes within an average 3.60% of the predictive performance of large hand-curated training sets.
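As an illustration of the labeling-function workflow the abstract describes, below is a minimal sketch using the current open-source `snorkel` package (v0.9+). Its API postdates and differs from the 2017 system presented in the paper, and the toy data, label names, and heuristics are hypothetical; the intent is only to show heuristics being written as labeling functions and their noisy outputs being denoised and combined without ground truth.

```python
import pandas as pd
from snorkel.labeling import labeling_function, PandasLFApplier
from snorkel.labeling.model import LabelModel

# Hypothetical label scheme for a toy spam-detection task.
SPAM, HAM, ABSTAIN = 1, 0, -1

@labeling_function()
def lf_contains_link(x):
    # Heuristic: messages containing a URL are often spam.
    return SPAM if "http" in x.text.lower() else ABSTAIN

@labeling_function()
def lf_short_message(x):
    # Heuristic: very short messages are usually not spam.
    return HAM if len(x.text.split()) < 5 else ABSTAIN

# Hypothetical unlabeled training data.
df_train = pd.DataFrame({
    "text": [
        "check out http://deals.example for a free prize",
        "thanks, see you tomorrow",
        "click http://spam.example now",
        "lunch at noon?",
    ]
})

# Apply the labeling functions to produce a noisy label matrix (one column per LF).
applier = PandasLFApplier(lfs=[lf_contains_link, lf_short_message])
L_train = applier.apply(df=df_train)

# Fit the generative label model to estimate LF accuracies and combine their
# outputs without ground truth, then emit probabilistic training labels that
# can be used to train a downstream discriminative model.
label_model = LabelModel(cardinality=2, verbose=False)
label_model.fit(L_train=L_train, n_epochs=100, seed=123)
probs_train = label_model.predict_proba(L=L_train)
```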
@article{ratnerSnorkelRapidTraining2017,
  archivePrefix = {arXiv},
  eprinttype = {arxiv},
  eprint = {1711.10160},
  title = {Snorkel: {{Rapid Training Data Creation}} with {{Weak Supervision}}},
  volume = {11},
  issn = {2150-8097},
  url = {http://arxiv.org/abs/1711.10160},
  doi = {10.14778/3157794.3157797},
  shorttitle = {Snorkel},
  abstract = {Labeling training data is increasingly the largest bottleneck in deploying machine learning systems. We present Snorkel, a first-of-its-kind system that enables users to train state-of-the-art models without hand labeling any training data. Instead, users write labeling functions that express arbitrary heuristics, which can have unknown accuracies and correlations. Snorkel denoises their outputs without access to ground truth by incorporating the first end-to-end implementation of our recently proposed machine learning paradigm, data programming. We present a flexible interface layer for writing labeling functions based on our experience over the past year collaborating with companies, agencies, and research labs. In a user study, subject matter experts build models 2.8x faster and increase predictive performance an average 45.5\% versus seven hours of hand labeling. We study the modeling tradeoffs in this new setting and propose an optimizer for automating tradeoff decisions that gives up to 1.8x speedup per pipeline execution. In two collaborations, with the U.S. Department of Veterans Affairs and the U.S. Food and Drug Administration, and on four open-source text and image data sets representative of other deployments, Snorkel provides 132\% average improvements to predictive performance over prior heuristic approaches and comes within an average 3.60\% of the predictive performance of large hand-curated training sets.},
  number = {3},
  journaltitle = {Proceedings of the VLDB Endowment},
  urldate = {2019-04-16},
  date = {2017-11-01},
  pages = {269-282},
  keywords = {Statistics - Machine Learning,Computer Science - Machine Learning},
  author = {Ratner, Alexander and Bach, Stephen H. and Ehrenberg, Henry and Fries, Jason and Wu, Sen and Ré, Christopher},
}
