Game theory, maximum entropy, minimum discrepancy and robust Bayesian decision theory. Grünwald, P. D. & Dawid, A. P. Annals of Statistics, 32(4):1367–1433, 2004.
We describe and develop a close relationship between two problems that have customarily been regarded as distinct: that of maximizing entropy, and that of minimizing worst-case expected loss. Using a formulation grounded in the equilibrium theory of zero-sum games between Decision Maker and Nature, these two problems are shown to be dual to each other, the solution to each providing that to the other. Although Topsøe described this connection for the Shannon entropy over 20 years ago, it does not appear to be widely known even in that important special case. We here generalize this theory to apply to arbitrary decision problems and loss functions. We indicate how an appropriate generalized definition of entropy can be associated with such a problem, and we show that, subject to certain regularity conditions, the above-mentioned duality continues to apply in this extended context. This simultaneously provides a possible rationale for maximizing entropy and a tool for finding robust Bayes acts. We also describe the essential identity between the problem of maximizing entropy and that of minimizing a related discrepancy or divergence between distributions. This leads to an extension, to arbitrary discrepancies, of a well-known minimax theorem for the case of Kullback–Leibler divergence (the "redundancy-capacity theorem" of information theory).
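As a minimal numerical sketch of the classical special case the abstract refers to (log loss and Shannon entropy), the Python snippet below illustrates the duality on a finite outcome space with a single mean-value constraint: the maximum-entropy distribution is an exponential-family member and acts as an "equalizer rule", incurring the same expected log loss against every distribution in the constraint set. The outcome space, constraint value, and use of scipy are illustrative assumptions, not taken from the paper.

# Illustrative sketch (not from the paper): for log loss over a finite outcome
# space with a mean-value constraint E_P[X] = t, the maximum-entropy
# distribution p* is an equalizer rule, so it attains the minimax
# (robust Bayes) expected loss, which equals the maximum entropy.
import numpy as np
from scipy.optimize import minimize_scalar

x = np.arange(4)          # outcomes {0, 1, 2, 3}
t = 1.2                   # mean-value constraint E_P[X] = t

# Max-entropy distribution under E[X] = t is exponential family:
# p*(x) proportional to exp(lam * x), with lam minimizing the convex
# dual  log Z(lam) - lam * t.
def dual(lam):
    return np.log(np.exp(lam * x).sum()) - lam * t

lam = minimize_scalar(dual).x
p_star = np.exp(lam * x)
p_star /= p_star.sum()

max_entropy = -(p_star * np.log(p_star)).sum()

# Other distributions satisfying the same mean constraint
# (two-point distributions with mean 1.2).
others = [
    np.array([0.4, 0.0, 0.6, 0.0]),
    np.array([0.0, 0.8, 0.2, 0.0]),
    np.array([0.6, 0.0, 0.0, 0.4]),
]

# Expected log loss of acting with p* is constant over the constraint set
# and equals the maximum entropy: the game-theoretic duality in miniature.
for q in others:
    assert abs(q @ x - t) < 1e-12
    print(q @ (-np.log(p_star)), "vs max entropy", max_entropy)

Each printed expected loss coincides with the maximum entropy value, because -log p*(x) is affine in the constrained statistic, which is exactly what makes p* a saddle point of the zero-sum game between Decision Maker and Nature.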
@article{Grunwald2004,
abstract = {We describe and develop a close relationship between two problems that have customarily been regarded as distinct: that of maximizing entropy, and that of minimizing worst-case expected loss. Using a formulation grounded in the equilibrium theory of zero-sum games between Decision Maker and Nature, these two problems are shown to be dual to each other, the solution to each providing that to the other. Although Tops{\o}e described this connection for the Shannon entropy over 20 years ago, it does not appear to be widely known even in that important special case. We here generalize this theory to apply to arbitrary decision problems and loss functions. We indicate how an appropriate generalized definition of entropy can be associated with such a problem, and we show that, subject to certain regularity conditions, the above-mentioned duality continues to apply in this extended context. This simultaneously provides a possible rationale for maximizing entropy and a tool for finding robust Bayes acts. We also describe the essential identity between the problem of maximizing entropy and that of minimizing a related discrepancy or divergence between distributions. This leads to an extension, to arbitrary discrepancies, of a well-known minimax theorem for the case of Kullback–Leibler divergence (the ``redundancy-capacity theorem'' of information theory).},
archivePrefix = {arXiv},
arxivId = {arXiv:math/0410076v1},
author = {Gr{\"{u}}nwald, Peter D. and {Philip Dawid}, A.},
doi = {10.1214/009053604000000553},
eprint = {math/0410076v1},
issn = {0090-5364},
journal = {Annals of Statistics},
keywords = {Additive model,Bayes act,Bregman divergence,Brier score,Convexity,Duality,Equalizer rule,Exponential family,Gamma-minimax,Generalized exponential family,Kullback-Leibler divergence,Logarithmic score,Maximin,Mean-value constraints,Minimax},
number = {4},
pages = {1367--1433},
primaryClass = {arXiv:math},
title = {{Game theory, maximum entropy, minimum discrepancy and robust Bayesian decision theory}},
volume = {32},
year = {2004}
}
