A Sparse-Group Lasso. Simon, N., Friedman, J., Hastie, T., & Tibshirani, R. Journal of Computational and Graphical Statistics, 22(2):231–245, 2013. doi abstract bibtex For high-dimensional supervised learning problems, often using problem-specific assumptions can lead to greater accuracy. For problems with grouped covariates, which are believed to have sparse effects both on a group and within group level, we introduce a regularized model for linear regression with l1 and l2 penalties. We discuss the sparsity and other regularization properties of the optimal fit for this model, and show that it has the desired effect of group-wise and within group sparsity. We propose an algorithm to fit the model via accelerated generalized gradient descent, and extend this model and algorithm to convex loss functions. We also demonstrate the efficacy of our model and the efficiency of our algorithm on simulated data. This article has online supplementary material.
% Simon et al. (2013), JCGS 22(2) — sparse-group lasso (l1+l2 penalized grouped regression).
% Cleaned from CiteULike auto-export: full journal name, `--` page range, braced
% proper noun in title, URL moved out of the misused `eprint` field.
@article{sim13spa,
  author   = {Simon, Noah and Friedman, Jerome and Hastie, Trevor and Tibshirani, Robert},
  title    = {A Sparse-Group {Lasso}},
  journal  = {Journal of Computational and Graphical Statistics},
  volume   = {22},
  number   = {2},
  pages    = {231--245},
  year     = {2013},
  doi      = {10.1080/10618600.2012.681250},
  url      = {http://www.tandfonline.com/doi/pdf/10.1080/10618600.2012.681250},
  abstract = {For high-dimensional supervised learning problems, often using problem-specific assumptions can lead to greater accuracy. For problems with grouped covariates, which are believed to have sparse effects both on a group and within group level, we introduce a regularized model for linear regression with {$\ell_1$} and {$\ell_2$} penalties. We discuss the sparsity and other regularization properties of the optimal fit for this model, and show that it has the desired effect of group-wise and within group sparsity. We propose an algorithm to fit the model via accelerated generalized gradient descent, and extend this model and algorithm to convex loss functions. We also demonstrate the efficacy of our model and the efficiency of our algorithm on simulated data. This article has online supplementary material.},
  citeulike-article-id = {13265969},
  citeulike-linkout-0  = {http://dx.doi.org/10.1080/10618600.2012.681250},
  citeulike-linkout-1  = {http://www.tandfonline.com/doi/abs/10.1080/10618600.2012.681250},
  posted-at = {2014-07-14 14:10:08},
  priority  = {0},
  annote    = {sparse effects both on a group and within group levels;can also be considered special case of group lasso allowing overlap between groups}
}
Downloads: 0
{"_id":"3mKLLjR3LCk6an9dY","bibbaseid":"simon-friedman-hastie-tibshirani-asparsegrouplasso-2013","downloads":0,"creationDate":"2018-06-23T20:06:33.978Z","title":"A Sparse-Group Lasso","author_short":["Simon, N.","Friedman, J.","Hastie, T.","Tibshirani, R."],"year":2013,"bibtype":"article","biburl":"http://hbiostat.org/bib/harrelfe.bib","bibdata":{"bibtype":"article","type":"article","title":"A Sparse-Group Lasso","volume":"22","abstract":"For high-dimensional supervised learning problems, often using problem-specific assumptions can lead to greater accuracy. For problems with grouped covariates, which are believed to have sparse effects both on a group and within group level, we introduce a regularized model for linear regression with l1 and l2 penalties. We discuss the sparsity and other regularization properties of the optimal fit for this model, and show that it has the desired effect of group-wise and within group sparsity. We propose an algorithm to fit the model via accelerated generalized gradient descent, and extend this model and algorithm to convex loss functions. We also demonstrate the efficacy of our model and the efficiency of our algorithm on simulated data. 
This article has online supplementary material.","number":"2","journal":"J Comp Graph Stat","doi":"10.1080/10618600.2012.681250","author":[{"propositions":[],"lastnames":["Simon"],"firstnames":["Noah"],"suffixes":[]},{"propositions":[],"lastnames":["Friedman"],"firstnames":["Jerome"],"suffixes":[]},{"propositions":[],"lastnames":["Hastie"],"firstnames":["Trevor"],"suffixes":[]},{"propositions":[],"lastnames":["Tibshirani"],"firstnames":["Robert"],"suffixes":[]}],"year":"2013","pages":"231-245","eprint":"http://www.tandfonline.com/doi/pdf/10.1080/10618600.2012.681250","citeulike-article-id":"13265969","citeulike-linkout-0":"http://dx.doi.org/10.1080/10618600.2012.681250","citeulike-linkout-1":"http://www.tandfonline.com/doi/abs/10.1080/10618600.2012.681250","posted-at":"2014-07-14 14:10:08","priority":"0","annote":"sparse effects both on a group and within group levels;can also be considered special case of group lasso allowing overlap between groups","bibtex":"@article{sim13spa,\n title = {A Sparse-Group Lasso},\n volume = {22},\n abstract = {For high-dimensional supervised learning problems, often using problem-specific assumptions can lead to greater accuracy. For problems with grouped covariates, which are believed to have sparse effects both on a group and within group level, we introduce a regularized model for linear regression with l1 and l2 penalties. We discuss the sparsity and other regularization properties of the optimal fit for this model, and show that it has the desired effect of group-wise and within group sparsity. We propose an algorithm to fit the model via accelerated generalized gradient descent, and extend this model and algorithm to convex loss functions. We also demonstrate the efficacy of our model and the efficiency of our algorithm on simulated data. 
This article has online supplementary material.},\n number = {2},\n journal = {J Comp Graph Stat},\n doi = {10.1080/10618600.2012.681250},\n author = {Simon, Noah and Friedman, Jerome and Hastie, Trevor and Tibshirani, Robert},\n year = {2013},\n pages = {231-245},\n eprint = {http://www.tandfonline.com/doi/pdf/10.1080/10618600.2012.681250},\n citeulike-article-id = {13265969},\n citeulike-linkout-0 = {http://dx.doi.org/10.1080/10618600.2012.681250},\n citeulike-linkout-1 = {http://www.tandfonline.com/doi/abs/10.1080/10618600.2012.681250},\n posted-at = {2014-07-14 14:10:08},\n priority = {0},\n annote = {sparse effects both on a group and within group levels;can also be considered special case of group lasso allowing overlap between groups}\n}\n\n","author_short":["Simon, N.","Friedman, J.","Hastie, T.","Tibshirani, R."],"key":"sim13spa","id":"sim13spa","bibbaseid":"simon-friedman-hastie-tibshirani-asparsegrouplasso-2013","role":"author","urls":{},"downloads":0},"search_terms":["sparse","group","lasso","simon","friedman","hastie","tibshirani"],"keywords":["*import"],"authorIDs":[],"dataSources":["mEQakjn8ggpMsnGJi"]}