@inproceedings{c54166ce97334378ab0af52a3fd1a2b0,
title = "Erasure entropy",
abstract = "We define the erasure entropy of a collection of random variables as the sum of the entropies of the individual variables conditioned on all the rest. The erasure entropy rate of a source is defined as the limit of the normalized erasure entropy. The erasure entropy measures the information content carried by each symbol given its context. In the setup of a source observed through an erasure channel, we offer an operational characterization of the erasure entropy rate as the minimal number of bits per erasure required to recover the erased information in the limit of small erasure probability. When we allow recovery of the erased symbols within a prescribed degree of distortion, the fundamental tradeoff is described by the erasure rate-distortion function, which we characterize. When no additional encoded information is available, the erased information is reconstructed solely on the basis of its context by a denoiser. Connections between erasure entropy and discrete denoising are also explored.",
keywords = "Data compression, Discrete denoising, Entropy, Erasure channels, Markov processes, Rate-distortion theory, Shannon theory",
author = "Sergio Verd{\'u} and Tsachy Weissman",
year = "2006",
doi = "10.1109/ISIT.2006.261682",
language = "English (US)",
isbn = "1424405041",
series = "IEEE International Symposium on Information Theory - Proceedings",
pages = "98--102",
booktitle = "Proceedings - 2006 IEEE International Symposium on Information Theory, ISIT 2006",
note = "2006 IEEE International Symposium on Information Theory, ISIT 2006; Conference date: 09-07-2006 through 14-07-2006",
}