DAGs with NO TEARS: Continuous Optimization for Structure Learning. Zheng, X., Aragam, B., Ravikumar, P., & Xing, E. P. 2018.
Paper doi abstract bibtex Estimating the structure of directed acyclic graphs (DAGs, also known as Bayesian networks) is a challenging problem since the search space of DAGs is combinatorial and scales superexponentially with the number of nodes. Existing approaches rely on various local heuristics for enforcing the acyclicity constraint. In this paper, we introduce a fundamentally different strategy: We formulate the structure learning problem as a purely \emph{continuous} optimization problem over real matrices that avoids this combinatorial constraint entirely. This is achieved by a novel characterization of acyclicity that is not only smooth but also exact. The resulting problem can be efficiently solved by standard numerical algorithms, which also makes implementation effortless. The proposed method outperforms existing ones, without imposing any structural assumptions on the graph such as bounded treewidth or in-degree. Code implementing the proposed algorithm is open-source and publicly available at https://github.com/xunzheng/notears.
@misc{zheng_dags_2018,
	title = {{DAGs} with {NO} {TEARS}: {Continuous} {Optimization} for {Structure} {Learning}},
	copyright = {arXiv.org perpetual, non-exclusive license},
	shorttitle = {{DAGs} with {NO} {TEARS}},
	url = {https://arxiv.org/abs/1803.01422},
	doi = {10.48550/ARXIV.1803.01422},
	eprint = {1803.01422},
	eprinttype = {arXiv},
	eprintclass = {stat.ML},
	abstract = {Estimating the structure of directed acyclic graphs (DAGs, also known as Bayesian networks) is a challenging problem since the search space of DAGs is combinatorial and scales superexponentially with the number of nodes. Existing approaches rely on various local heuristics for enforcing the acyclicity constraint. In this paper, we introduce a fundamentally different strategy: We formulate the structure learning problem as a purely \emph{continuous} optimization problem over real matrices that avoids this combinatorial constraint entirely. This is achieved by a novel characterization of acyclicity that is not only smooth but also exact. The resulting problem can be efficiently solved by standard numerical algorithms, which also makes implementation effortless. The proposed method outperforms existing ones, without imposing any structural assumptions on the graph such as bounded treewidth or in-degree. Code implementing the proposed algorithm is open-source and publicly available at https://github.com/xunzheng/notears.},
	urldate = {2025-02-24},
	publisher = {arXiv},
	author = {Zheng, Xun and Aragam, Bryon and Ravikumar, Pradeep and Xing, Eric P.},
	year = {2018},
	keywords = {Artificial Intelligence (cs.AI), FOS: Computer and information sciences, Machine Learning (cs.LG), Machine Learning (stat.ML), Methodology (stat.ME), 🟢},
}
Downloads: 0
{"_id":"akn7Tpxk7mo4kfpz6","bibbaseid":"zheng-aragam-ravikumar-xing-dagswithnotearscontinuousoptimizationforstructurelearning-2018","author_short":["Zheng, X.","Aragam, B.","Ravikumar, P.","Xing, E. P."],"bibdata":{"bibtype":"misc","type":"misc","title":"DAGs with NO TEARS: Continuous Optimization for Structure Learning","copyright":"arXiv.org perpetual, non-exclusive license","shorttitle":"DAGs with NO TEARS","url":"https://arxiv.org/abs/1803.01422","doi":"10.48550/ARXIV.1803.01422","abstract":"Estimating the structure of directed acyclic graphs (DAGs, also known as Bayesian networks) is a challenging problem since the search space of DAGs is combinatorial and scales superexponentially with the number of nodes. Existing approaches rely on various local heuristics for enforcing the acyclicity constraint. In this paper, we introduce a fundamentally different strategy: We formulate the structure learning problem as a purely \\emph\\continuous\\ optimization problem over real matrices that avoids this combinatorial constraint entirely. This is achieved by a novel characterization of acyclicity that is not only smooth but also exact. The resulting problem can be efficiently solved by standard numerical algorithms, which also makes implementation effortless. The proposed method outperforms existing ones, without imposing any structural assumptions on the graph such as bounded treewidth or in-degree. 
Code implementing the proposed algorithm is open-source and publicly available at https://github.com/xunzheng/notears.","urldate":"2025-02-24","publisher":"arXiv","author":[{"propositions":[],"lastnames":["Zheng"],"firstnames":["Xun"],"suffixes":[]},{"propositions":[],"lastnames":["Aragam"],"firstnames":["Bryon"],"suffixes":[]},{"propositions":[],"lastnames":["Ravikumar"],"firstnames":["Pradeep"],"suffixes":[]},{"propositions":[],"lastnames":["Xing"],"firstnames":["Eric","P."],"suffixes":[]}],"year":"2018","keywords":"Artificial Intelligence (cs.AI), FOS: Computer and information sciences, Machine Learning (cs.LG), Machine Learning (stat.ML), Methodology (stat.ME), 🟢","bibtex":"@misc{zheng_dags_2018,\n\ttitle = {{DAGs} with {NO} {TEARS}: {Continuous} {Optimization} for {Structure} {Learning}},\n\tcopyright = {arXiv.org perpetual, non-exclusive license},\n\tshorttitle = {{DAGs} with {NO} {TEARS}},\n\turl = {https://arxiv.org/abs/1803.01422},\n\tdoi = {10.48550/ARXIV.1803.01422},\n\tabstract = {Estimating the structure of directed acyclic graphs (DAGs, also known as Bayesian networks) is a challenging problem since the search space of DAGs is combinatorial and scales superexponentially with the number of nodes. Existing approaches rely on various local heuristics for enforcing the acyclicity constraint. In this paper, we introduce a fundamentally different strategy: We formulate the structure learning problem as a purely {\\textbackslash}emph\\{continuous\\} optimization problem over real matrices that avoids this combinatorial constraint entirely. This is achieved by a novel characterization of acyclicity that is not only smooth but also exact. The resulting problem can be efficiently solved by standard numerical algorithms, which also makes implementation effortless. The proposed method outperforms existing ones, without imposing any structural assumptions on the graph such as bounded treewidth or in-degree. 
Code implementing the proposed algorithm is open-source and publicly available at https://github.com/xunzheng/notears.},\n\turldate = {2025-02-24},\n\tpublisher = {arXiv},\n\tauthor = {Zheng, Xun and Aragam, Bryon and Ravikumar, Pradeep and Xing, Eric P.},\n\tyear = {2018},\n\tkeywords = {Artificial Intelligence (cs.AI), FOS: Computer and information sciences, Machine Learning (cs.LG), Machine Learning (stat.ML), Methodology (stat.ME), 🟢},\n}\n\n\n\n","author_short":["Zheng, X.","Aragam, B.","Ravikumar, P.","Xing, E. P."],"key":"zheng_dags_2018","id":"zheng_dags_2018","bibbaseid":"zheng-aragam-ravikumar-xing-dagswithnotearscontinuousoptimizationforstructurelearning-2018","role":"author","urls":{"Paper":"https://arxiv.org/abs/1803.01422"},"keyword":["Artificial Intelligence (cs.AI)","FOS: Computer and information sciences","Machine Learning (cs.LG)","Machine Learning (stat.ML)","Methodology (stat.ME)","🟢"],"metadata":{"authorlinks":{}},"html":""},"bibtype":"misc","biburl":"https://bibbase.org/zotero-group/dounia_zedira/5884286","dataSources":["iwKepCrWBps7ojhDx","ykL55rZW796LrqK5F"],"keywords":["artificial intelligence (cs.ai)","fos: computer and information sciences","machine learning (cs.lg)","machine learning (stat.ml)","methodology (stat.me)","🟢"],"search_terms":["dags","tears","continuous","optimization","structure","learning","zheng","aragam","ravikumar","xing"],"title":"DAGs with NO TEARS: Continuous Optimization for Structure Learning","year":2018}