On Solution Functions of Optimization: Universal Approximation and Covering Number Bounds. Jin, M., Khattar, V., Kaushik, H., Sel, B., & Jia, R. In AAAI Conference on Artificial Intelligence (AAAI), 2023. (oral presentation)

Abstract: We study the expressibility and learnability of solution functions of convex optimization and their multi-layer architectural extensions. The main results are: (1) the class of solution functions of linear programming (LP) and quadratic programming (QP) is a universal approximant for the $C^k$ smooth model class or some restricted Sobolev space, and we characterize the rate-distortion; (2) the approximation power is investigated through the viewpoint of regression error, where information about the target function is provided in terms of data observations; (3) compositionality in the form of a deep architecture with optimization as a layer is shown to reconstruct some basic functions used in numerical analysis without error, which implies that (4) a substantial reduction in rate-distortion can be achieved with a universal network architecture; and (5) we discuss statistical bounds on empirical covering numbers for LP/QP, as well as for a generic (possibly nonconvex) optimization problem, by exploiting tame geometry. Our results provide the **first rigorous analysis of the approximation and learning-theoretic properties of solution functions**, with implications for algorithmic design and performance guarantees.
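To make the "optimization as a layer" idea from the abstract concrete, the following is a minimal illustrative sketch (assuming Python with cvxpy, which is not the paper's implementation): the solution function of a simple parametric QP is itself a nontrivial piecewise-linear map of the problem data, which is the kind of building block behind the LP/QP universal-approximation result.

```python
# Illustrative sketch only (not code from the paper): the solution function of the
# parametric QP  x*(theta) = argmin_x (x - theta)^2  s.t.  0 <= x <= 1
# is the piecewise-linear clipping map clip(theta, 0, 1). Treating theta as the
# layer input and x*(theta) as the layer output gives a minimal "optimization as
# a layer" example. The use of cvxpy here is an assumption for illustration.
import cvxpy as cp
import numpy as np

theta = cp.Parameter()                                   # problem data: the layer's input
x = cp.Variable()                                        # decision variable
problem = cp.Problem(cp.Minimize(cp.sum_squares(x - theta)),
                     [x >= 0, x <= 1])                   # box-constrained projection QP

for t in [-0.5, 0.3, 1.7]:
    theta.value = t
    problem.solve()
    print(f"theta = {t:+.2f}   QP solution x*(theta) = {x.value:.4f}   "
          f"closed form clip(theta, 0, 1) = {np.clip(t, 0.0, 1.0):.4f}")
```

Composing several such parametric solution maps in a deep architecture is the mechanism the paper studies for its compositionality and rate-distortion results.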
@inproceedings{2023_4C_Sol,
title={On Solution Functions of Optimization: Universal Approximation and Covering Number Bounds},
author={Ming Jin and Vanshaj Khattar and Harshal Kaushik and Bilgehan Sel and Ruoxi Jia},
booktitle={AAAI Conference on Artificial Intelligence (AAAI)},
note = {(oral presentation)},
year={2023},
url_arXiv={https://arxiv.org/abs/2212.01314},
url_pdf={Sol_function_complexity.pdf},
keywords = {Optimization, Machine Learning},
abstract={We study the expressibility and learnability of solution functions of convex optimization and their multi-layer architectural extension. The main results are: (1) the class of solution functions of linear programming (LP) and quadratic programming (QP) is a universal approximant for the $C^k$ smooth model class or some restricted Sobolev space, and we characterize the rate-distortion, (2) the approximation power is investigated through a viewpoint of regression error, where information about the target function is provided in terms of data observations, (3) compositionality in the form of deep architecture with optimization as a layer is shown to reconstruct some basic functions used in numerical analysis without error, which implies that (4) a substantial reduction in rate-distortion can be achieved with a universal network architecture, and (5) we discuss the statistical bounds of empirical covering numbers for LP/QP, as well as a generic optimization problem (possibly nonconvex) by exploiting tame geometry. Our results provide the **first rigorous analysis of the approximation and learning-theoretic properties of solution functions** with implications for algorithmic design and performance guarantees. },
}
{"_id":"nHwk9b9FwuJb6uHxb","bibbaseid":"jin-khattar-kaushik-sel-jia-onsolutionfunctionsofoptimizationuniversalapproximationandcoveringnumberbounds-2023","author_short":["Jin, M.","Khattar, V.","Kaushik, H.","Sel, B.","Jia, R."],"bibdata":{"bibtype":"inproceedings","type":"inproceedings","title":"On Solution Functions of Optimization: Universal Approximation and Covering Number Bounds","author":[{"firstnames":["Ming"],"propositions":[],"lastnames":["Jin"],"suffixes":[]},{"firstnames":["Vanshaj"],"propositions":[],"lastnames":["Khattar"],"suffixes":[]},{"firstnames":["Harshal"],"propositions":[],"lastnames":["Kaushik"],"suffixes":[]},{"firstnames":["Bilgehan"],"propositions":[],"lastnames":["Sel"],"suffixes":[]},{"firstnames":["Ruoxi"],"propositions":[],"lastnames":["Jia"],"suffixes":[]}],"booktitle":"AAAI Conference on Artificial Intelligence (AAAI)","note":"<font style=\"color:#FF0000\">(oral presentation)</font>","pages":"","year":"2023","url_arxiv":"https://arxiv.org/abs/2212.01314","url_pdf":"Sol_function_complexity.pdf","keywords":"Optimization, Machine Learning","abstract":"We study the expressibility and learnability of solution functions of convex optimization and their multi-layer architectural extension. The main results are: (1) the class of solution functions of linear programming (LP) and quadratic programming (QP) is a universal approximant for the $C^k$ smooth model class or some restricted Sobolev space, and we characterize the rate-distortion, (2) the approximation power is investigated through a viewpoint of regression error, where information about the target function is provided in terms of data observations, (3) compositionality in the form of deep architecture with optimization as a layer is shown to reconstruct some basic functions used in numerical analysis without error, which implies that (4) a substantial reduction in rate-distortion can be achieved with a universal network architecture, and (5) we discuss the statistical bounds of empirical covering numbers for LP/QP, as well as a generic optimization problem (possibly nonconvex) by exploiting tame geometry. Our results provide the **first rigorous analysis of the approximation and learning-theoretic properties of solution functions** with implications for algorithmic design and performance guarantees. ","bibtex":"@inproceedings{2023_4C_Sol,\n title={On Solution Functions of Optimization: Universal Approximation and Covering Number Bounds},\n author={Ming Jin and Vanshaj Khattar and Harshal Kaushik and Bilgehan Sel and Ruoxi Jia},\n booktitle={AAAI Conference on Artificial Intelligence (AAAI)},\n note = {<font style=\"color:#FF0000\">(oral presentation)</font>},\n pages={},\n year={2023},\n url_arXiv={https://arxiv.org/abs/2212.01314},\n url_pdf={Sol_function_complexity.pdf},\n keywords = {Optimization, Machine Learning},\n abstract={We study the expressibility and learnability of solution functions of convex optimization and their multi-layer architectural extension. 
The main results are: (1) the class of solution functions of linear programming (LP) and quadratic programming (QP) is a universal approximant for the $C^k$ smooth model class or some restricted Sobolev space, and we characterize the rate-distortion, (2) the approximation power is investigated through a viewpoint of regression error, where information about the target function is provided in terms of data observations, (3) compositionality in the form of deep architecture with optimization as a layer is shown to reconstruct some basic functions used in numerical analysis without error, which implies that (4) a substantial reduction in rate-distortion can be achieved with a universal network architecture, and (5) we discuss the statistical bounds of empirical covering numbers for LP/QP, as well as a generic optimization problem (possibly nonconvex) by exploiting tame geometry. Our results provide the **first rigorous analysis of the approximation and learning-theoretic properties of solution functions** with implications for algorithmic design and performance guarantees. },\n}\n\n","author_short":["Jin, M.","Khattar, V.","Kaushik, H.","Sel, B.","Jia, R."],"key":"2023_4C_Sol","id":"2023_4C_Sol","bibbaseid":"jin-khattar-kaushik-sel-jia-onsolutionfunctionsofoptimizationuniversalapproximationandcoveringnumberbounds-2023","role":"author","urls":{" arxiv":"https://arxiv.org/abs/2212.01314"," pdf":"http://www.jinming.tech/papers/Sol_function_complexity.pdf"},"keyword":["Optimization","Machine Learning"],"metadata":{"authorlinks":{}},"downloads":16},"bibtype":"inproceedings","biburl":"http://www.jinming.tech/papers/myref.bib","dataSources":["sTzDHHaipTZWjp8oe","Y64tp2HnDCfXgLdc5"],"keywords":["optimization","machine learning"],"search_terms":["solution","functions","optimization","universal","approximation","covering","number","bounds","jin","khattar","kaushik","sel","jia"],"title":"On Solution Functions of Optimization: Universal Approximation and Covering Number Bounds","year":2023,"downloads":16}