@inproceedings{f07869bafaa7442b94e464c6336cc723,
title = "Arimoto-R{\'e}nyi conditional entropy and Bayesian hypothesis testing",
abstract = "This paper gives upper and lower bounds on the minimum error probability of Bayesian M-ary hypothesis testing in terms of the Arimoto-R{\'e}nyi conditional entropy of an arbitrary order α. The improved tightness of these bounds over their specialized versions with the Shannon conditional entropy (α = 1) is demonstrated. In particular, in the case where M is finite, we show how to generalize Fano's inequality under both the conventional and list-decision settings. As a counterpart to the generalized Fano's inequality, allowing M to be infinite, a lower bound on the Arimoto-R{\'e}nyi conditional entropy is derived as a function of the minimum error probability. Explicit upper and lower bounds on the minimum error probability are obtained as a function of the Arimoto-R{\'e}nyi conditional entropy.",
keywords = "Arimoto-R{\'e}nyi conditional entropy, Fano's inequality, Hypothesis testing, Information measures, Minimum probability of error, R{\'e}nyi divergence",
author = "Igal Sason and Sergio Verd{\'u}",
note = "Publisher Copyright: {\textcopyright} 2017 IEEE.; 2017 IEEE International Symposium on Information Theory, ISIT 2017 ; Conference date: 25-06-2017 Through 30-06-2017",
year = "2017",
month = aug,
day = "9",
doi = "10.1109/ISIT.2017.8007073",
language = "English (US)",
series = "IEEE International Symposium on Information Theory - Proceedings",
publisher = "Institute of Electrical and Electronics Engineers Inc.",
pages = "2965--2969",
booktitle = "2017 IEEE International Symposium on Information Theory, ISIT 2017",
address = "United States",
}
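
For readers of the abstract above, a minimal sketch (not part of the bibliographic record itself) of the central quantity: the Arimoto-R{\'e}nyi conditional entropy of order $\alpha$ in Arimoto's standard form, together with its $\alpha \to \infty$ limit, which ties it to the minimum (MAP) error probability discussed in the abstract. The symbol $\varepsilon_{X\mid Y}$ is shorthand introduced here purely for illustration.

% Arimoto-Rényi conditional entropy of order \alpha (\alpha > 0, \alpha \neq 1)
% for a discrete X given an observation Y:
\[
  H_\alpha(X \mid Y)
  \;=\; \frac{\alpha}{1-\alpha}\,
  \log \mathbb{E}_Y\!\left[ \Bigl( \sum_{x} P_{X\mid Y}(x \mid Y)^{\alpha} \Bigr)^{1/\alpha} \right].
\]
% The limit \alpha \to 1 recovers the Shannon conditional entropy H(X|Y),
% which is the \alpha = 1 specialization (Fano-type bounds) referred to in the abstract.
% The limit \alpha \to \infty gives
\[
  H_\infty(X \mid Y) \;=\; -\log \mathbb{E}_Y\!\left[ \max_{x} P_{X\mid Y}(x \mid Y) \right],
  \qquad
  \varepsilon_{X\mid Y} \;=\; 1 - \exp\bigl(-H_\infty(X \mid Y)\bigr),
\]
% i.e. the order-\infty conditional entropy determines the minimum error probability
% exactly; the paper's upper and lower bounds extend this connection to general \alpha.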