{"_id":"t6Jtq4jsNyR2E8gAp","bibbaseid":"jahanshahiu-rezvaniu-wongb-igsc23wattwiserpowerresourceefficientschedulingformultimodelmultigpuinferenceservers-2023","author_short":["Jahanshahi</u>, <. <.","Rezvani</u>, <.","Wong</b>, <."],"bibdata":{"bibtype":"inproceedings","type":"inproceedings","title":"[IGSC'23] WattWiser: Power Resource-Efficient Scheduling for Multi-Model Multi-GPU Inference Servers","author":[{"propositions":[],"lastnames":["Jahanshahi</u>"],"firstnames":["<br/>","<u>Ali"],"suffixes":[]},{"propositions":[],"lastnames":["Rezvani</u>"],"firstnames":["<u>Mohammadreza"],"suffixes":[]},{"propositions":[],"lastnames":["Wong</b>"],"firstnames":["<b>Daniel"],"suffixes":[]}],"booktitle":"Proceedings of the 14th International Green and Sustainable Computing Conference (IGSC)","year":"2023","url_paper":"https://danielwong.org/files/WattWiser-IGSC23.pdf","bibtex":"@inproceedings{ali2023igsc,\n title={[IGSC'23] WattWiser: Power Resource-Efficient Scheduling for Multi-Model Multi-GPU Inference Servers},\n author={Jahanshahi</u>, <br/> <u>Ali and Rezvani</u>, <u>Mohammadreza and Wong</b>, <b>Daniel},\n booktitle={Proceedings of the 14th International Green and Sustainable Computing Conference (IGSC)}, \n year={2023},\n url_Paper = {https://danielwong.org/files/WattWiser-IGSC23.pdf}\n}\n\n","author_short":["Jahanshahi</u>, <. <.","Rezvani</u>, <.","Wong</b>, <."],"key":"ali2023igsc","id":"ali2023igsc","bibbaseid":"jahanshahiu-rezvaniu-wongb-igsc23wattwiserpowerresourceefficientschedulingformultimodelmultigpuinferenceservers-2023","role":"author","urls":{" paper":"https://danielwong.org/files/WattWiser-IGSC23.pdf"},"metadata":{"authorlinks":{}},"downloads":0,"html":""},"bibtype":"inproceedings","biburl":"danielwong.org/publication.bib","dataSources":["aLPopSyWTZ72WfLhv"],"keywords":[],"search_terms":["igsc","wattwiser","power","resource","efficient","scheduling","multi","model","multi","gpu","inference","servers","jahanshahi</u>","rezvani</u>","wong</b>"],"title":"[IGSC'23] WattWiser: Power Resource-Efficient Scheduling for Multi-Model Multi-GPU Inference Servers","year":2023}