Learning Everywhere: Pervasive Machine Learning for Effective High-Performance Computation: Application Background. Fox, G., Glazier, J., A., Kadupitiya, J., Jadhao, V., Kim, M., Qiu, J., Sluka, J., P., Somogyi, E., Marathe, M., Adiga, A., Chen, J., Beckstein, O., & Jha, S. In 2019 IEEE International Parallel and Distributed Processing Symposium Workshops (IPDPSW), page 33, May 2019. IEEE. Website doi abstract bibtex The convergence of HPC and data-intensive methodologies provide a promising approach to major performance improvements. This paper provides a general description of the interaction between traditional HPC and ML approaches and motivates the Learning Everywhere paradigm for HPC. We introduce the concept of effective performance that one can achieve by combining learning methodologies with simulation-based approaches, and distinguish between traditional performance as measured by benchmark scores. To support the promise of integrating HPC and learning methods, this paper examines specific examples and opportunities across a series of domains. It concludes with a series of open computer science and cyberinfrastructure questions and challenges that the Learning Everywhere paradigm presents.
@inproceedings{Fox2019a,
  title     = {Learning Everywhere: Pervasive Machine Learning for Effective High-Performance Computation: Application Background},
  author    = {Fox, Geoffrey and Glazier, James A. and Kadupitiya, Jcs and Jadhao, Vikram and Kim, Minje and Qiu, Judy and Sluka, James P. and Somogyi, Endre and Marathe, Madhav and Adiga, Abhijin and Chen, Jiangzhuo and Beckstein, Oliver and Jha, Shantenu},
  booktitle = {2019 {IEEE} International Parallel and Distributed Processing Symposium Workshops ({IPDPSW})},
  publisher = {IEEE},
  year      = {2019},
  month     = may,
  pages     = {33},
  doi       = {10.1109/IPDPSW.2019.00081},
  url       = {https://ieeexplore.ieee.org/document/8778333/},
  urldate   = {2019-08-21},
  abstract  = {The convergence of HPC and data-intensive methodologies provide a promising approach to major performance improvements. This paper provides a general description of the interaction between traditional HPC and ML approaches and motivates the Learning Everywhere paradigm for HPC. We introduce the concept of effective performance that one can achieve by combining learning methodologies with simulation-based approaches, and distinguish between traditional performance as measured by benchmark scores. To support the promise of integrating HPC and learning methods, this paper examines specific examples and opportunities across a series of domains. It concludes with a series of open computer science and cyberinfrastructure questions and challenges that the Learning Everywhere paradigm presents.},
}
Downloads: 0
{"_id":"W86QkM59qHG36FJk6","bibbaseid":"fox-glazier-kadupitiya-jadhao-kim-qiu-sluka-somogy-etal-learningeverywherepervasivemachinelearningforeffectivehighperformancecomputationapplicationbackground-2019","authorIDs":[],"author_short":["Fox, G.","Glazier, J., A.","Kadupitiya, J.","Jadhao, V.","Kim, M.","Qiu, J.","Sluka, J., P.","Somogy, E.","Marathe, M.","Adiga, A.","Chen, J.","Beckstein, O.","Jha, S.","Somogyi, E.","Marathe, M.","Adiga, A.","Chen, J.","Beckstein, O.","Jha, S."],"bibdata":{"title":"Learning Everywhere: Pervasive Machine Learning for Effective High-Performance Computation: Application Background","type":"inproceedings","year":"2019","pages":"33","websites":"http://dsc.soic.indiana.edu/publications/Learning_Everywhere.pdf,https://ieeexplore.ieee.org/document/8778333/","month":"5","publisher":"IEEE","id":"ecf9f27c-fa08-3caf-a779-1af03baaec95","created":"2019-10-01T17:20:57.114Z","accessed":"2019-08-21","file_attached":"true","profile_id":"42d295c0-0737-38d6-8b43-508cab6ea85d","last_modified":"2020-05-11T14:43:32.632Z","read":false,"starred":false,"authored":"true","confirmed":"true","hidden":false,"citation_key":"Fox2019a","private_publication":false,"abstract":"The convergence of HPC and data-intensive methodologies provide a promising approach to major performance improvements. This paper provides a general description of the interaction between traditional HPC and ML approaches and motivates the Learning Everywhere paradigm for HPC. We introduce the concept of effective performance that one can achieve by combining learning methodologies with simulation-based approaches, and distinguish between traditional performance as measured by benchmark scores. To support the promise of integrating HPC and learning methods, this paper examines specific examples and opportunities across a series of domains. 
It concludes with a series of open computer science and cyberinfrastructure questions and challenges that the Learning Everywhere paradigm presents.","bibtype":"inproceedings","author":"Fox, Geoffrey and Glazier, James A and Kadupitiya, Jcs and Jadhao, Vikram and Kim, Minje and Qiu, Judy and Sluka, James P. and Somogy, Endre and Marathe, Madhav and Adiga, Abhijin and Chen, Jiangzhuo and Beckstein, Oliver and Jha, Shantenu and Somogyi, Endre and Marathe, Madhav and Adiga, Abhijin and Chen, Jiangzhuo and Beckstein, Oliver and Jha, Shantenu","doi":"10.1109/IPDPSW.2019.00081","booktitle":"2019 IEEE International Parallel and Distributed Processing Symposium Workshops (IPDPSW)","bibtex":"@inproceedings{\n title = {Learning Everywhere: Pervasive Machine Learning for Effective High-Performance Computation: Application Background},\n type = {inproceedings},\n year = {2019},\n pages = {33},\n websites = {http://dsc.soic.indiana.edu/publications/Learning_Everywhere.pdf,https://ieeexplore.ieee.org/document/8778333/},\n month = {5},\n publisher = {IEEE},\n id = {ecf9f27c-fa08-3caf-a779-1af03baaec95},\n created = {2019-10-01T17:20:57.114Z},\n accessed = {2019-08-21},\n file_attached = {true},\n profile_id = {42d295c0-0737-38d6-8b43-508cab6ea85d},\n last_modified = {2020-05-11T14:43:32.632Z},\n read = {false},\n starred = {false},\n authored = {true},\n confirmed = {true},\n hidden = {false},\n citation_key = {Fox2019a},\n private_publication = {false},\n abstract = {The convergence of HPC and data-intensive methodologies provide a promising approach to major performance improvements. This paper provides a general description of the interaction between traditional HPC and ML approaches and motivates the Learning Everywhere paradigm for HPC. We introduce the concept of effective performance that one can achieve by combining learning methodologies with simulation-based approaches, and distinguish between traditional performance as measured by benchmark scores. 
To support the promise of integrating HPC and learning methods, this paper examines specific examples and opportunities across a series of domains. It concludes with a series of open computer science and cyberinfrastructure questions and challenges that the Learning Everywhere paradigm presents.},\n bibtype = {inproceedings},\n author = {Fox, Geoffrey and Glazier, James A and Kadupitiya, Jcs and Jadhao, Vikram and Kim, Minje and Qiu, Judy and Sluka, James P. and Somogy, Endre and Marathe, Madhav and Adiga, Abhijin and Chen, Jiangzhuo and Beckstein, Oliver and Jha, Shantenu and Somogyi, Endre and Marathe, Madhav and Adiga, Abhijin and Chen, Jiangzhuo and Beckstein, Oliver and Jha, Shantenu},\n doi = {10.1109/IPDPSW.2019.00081},\n booktitle = {2019 IEEE International Parallel and Distributed Processing Symposium Workshops (IPDPSW)}\n}","author_short":["Fox, G.","Glazier, J., A.","Kadupitiya, J.","Jadhao, V.","Kim, M.","Qiu, J.","Sluka, J., P.","Somogy, E.","Marathe, M.","Adiga, A.","Chen, J.","Beckstein, O.","Jha, S.","Somogyi, E.","Marathe, M.","Adiga, A.","Chen, J.","Beckstein, O.","Jha, S."],"urls":{"Website":"http://dsc.soic.indiana.edu/publications/Learning_Everywhere.pdf,https://ieeexplore.ieee.org/document/8778333/"},"biburl":"https://bibbase.org/service/mendeley/42d295c0-0737-38d6-8b43-508cab6ea85d","bibbaseid":"fox-glazier-kadupitiya-jadhao-kim-qiu-sluka-somogy-etal-learningeverywherepervasivemachinelearningforeffectivehighperformancecomputationapplicationbackground-2019","role":"author","metadata":{"authorlinks":{}},"downloads":0},"bibtype":"inproceedings","creationDate":"2019-10-01T17:32:36.987Z","downloads":0,"keywords":[],"search_terms":["learning","everywhere","pervasive","machine","learning","effective","high","performance","computation","application","background","fox","glazier","kadupitiya","jadhao","kim","qiu","sluka","somogy","marathe","adiga","chen","beckstein","jha","somogyi","marathe","adiga","chen","beckstein","jha"],"title":"Learning 
Everywhere: Pervasive Machine Learning for Effective High-Performance Computation: Application Background","year":2019,"biburl":"https://bibbase.org/service/mendeley/42d295c0-0737-38d6-8b43-508cab6ea85d","dataSources":["zgahneP4uAjKbudrQ","ya2CyA73rpZseyrZ8","2252seNhipfTmjEBQ"]}