Equivalence of ML decoding to a continuous optimization problem. Srinivasavaradhan, S. R., Diggavi, S., & Fragouli, C. In 2020 IEEE International Symposium on Information Theory (ISIT), pages 343–348, 2020. IEEE. doi: 10.1109/ISIT44484.2020.9174130

Abstract: Maximum likelihood (ML) and symbolwise maximum a posteriori (MAP) estimation for discrete input sequences play a central role in a number of applications that arise in communications, information, and coding theory. Many instances of these problems are provably intractable, for example through reduction to NP-complete integer optimization problems. In this work, we prove that ML estimation of a discrete input sequence (with no assumptions on the encoder/channel used) is equivalent to the solution of a continuous non-convex optimization problem, and that this formulation is closely related to the computation of symbolwise MAP estimates. This equivalence is particularly useful in situations where a function we term the expected likelihood is efficiently computable. In such situations, we give an ML heuristic and present numerical results for sequence estimation over the deletion channel.
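The abstract describes relaxing the discrete ML search into a continuous optimization over a product distribution on the input, which is attractive whenever the expected likelihood is cheap to evaluate, as it is for the deletion channel. The sketch below is a minimal illustration of that idea under my own assumptions, not the authors' algorithm: a binary alphabet, i.i.d. deletions with probability d, an independent per-position relaxation p, and plain projected gradient ascent followed by rounding. All function and parameter names here are illustrative.

import numpy as np

def expected_likelihood(p, y, d):
    """E_{x~p}[P(y | x)] for a binary deletion channel (illustrative).

    p : length-n array, p[i] = Pr(x_i = 1) under an independent
        per-position (relaxed) input distribution.
    y : observed output bits (after deletions).
    d : per-symbol deletion probability (assumed i.i.d.).

    Standard forward DP for the deletion channel, with the match
    indicator 1{x_i = y_j} replaced by its expectation under p;
    independence across positions makes this substitution exact.
    """
    n, m = len(p), len(y)
    F = np.zeros((n + 1, m + 1))
    F[0, 0] = 1.0
    for i in range(1, n + 1):
        F[i, 0] = d * F[i - 1, 0]                       # all i symbols deleted
        for j in range(1, m + 1):
            pr_match = p[i - 1] if y[j - 1] == 1 else 1.0 - p[i - 1]
            F[i, j] = d * F[i - 1, j] + (1.0 - d) * pr_match * F[i - 1, j - 1]
    return F[n, m]

def gradient(p, y, d):
    """Exact gradient: the expected likelihood is multilinear in p, so
    dEL/dp_i = EL(p with p_i = 1) - EL(p with p_i = 0)."""
    g = np.zeros(len(p))
    for i in range(len(p)):
        hi, lo = p.copy(), p.copy()
        hi[i], lo[i] = 1.0, 0.0
        g[i] = expected_likelihood(hi, y, d) - expected_likelihood(lo, y, d)
    return g

def ml_heuristic(y, n, d, steps=200, lr=0.2, seed=0):
    """Projected gradient ascent over the relaxed cube [0,1]^n, then
    rounding to a candidate input sequence (a heuristic, not exact ML)."""
    rng = np.random.default_rng(seed)
    p = rng.uniform(0.4, 0.6, size=n)                   # start near the centre
    for _ in range(steps):
        g = gradient(p, y, d)
        g = g / (np.abs(g).max() + 1e-12)               # likelihoods are tiny; rescale step
        p = np.clip(p + lr * g, 0.0, 1.0)
    return (p > 0.5).astype(int), p

if __name__ == "__main__":
    rng = np.random.default_rng(1)
    d = 0.2
    x_true = rng.integers(0, 2, size=12)
    y = x_true[rng.random(len(x_true)) > d]             # simulate i.i.d. deletions
    x_hat, p_relaxed = ml_heuristic(y, len(x_true), d)
    print("received :", y)
    print("estimate :", x_hat)
    print("truth    :", x_true)

The dynamic program mirrors the usual forward recursion for the deletion channel; replacing the match indicator with its expectation is exact because the relaxed input symbols are independent across positions, which is what makes the expected likelihood efficiently computable in this toy setting. Since the objective is multilinear in p, its maximizers lie at vertices of the cube, so gradient ascent plus rounding is a reasonable heuristic for recovering a discrete estimate.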
@inproceedings{srinivasavaradhan2020equivalence,
abstract = {Maximum likelihood (ML) and symbolwise maximum aposteriori (MAP) estimation for discrete input sequences play a central role in a number of applications that arise in communications, information and coding theory. Many instances of these problems are proven to be intractable, for example through reduction to NP-complete integer optimization problems. In this work, we prove that the ML estimation of a discrete input sequence (with no assumptions on the encoder/channel used) is equivalent to the solution of a continuous non-convex optimization problem, and that this formulation is closely related to the computation of symbolwise MAP estimates. This equivalence is particularly useful in situations where a function we term the expected likelihood is efficiently computable. In such situations, we give a ML heuristic and show numerics for sequence estimation over the deletion channel.},
author = {Srinivasavaradhan, Sundara Rajan and Diggavi, Suhas and Fragouli, Christina},
booktitle = {2020 IEEE International Symposium on Information Theory (ISIT)},
organization = {IEEE},
pages = {343--348},
tags = {conf,BioInf,IT,NDS},
title = {Equivalence of {ML} decoding to a continuous optimization problem},
type = {4},
doi = {10.1109/ISIT44484.2020.9174130},
year = {2020}
}