@article{594fba5222704bdf965400ecf066a7ae,
title = "A proof of the Fisher Information Inequality via a data processing argument",
abstract = "The Fisher information J(X) of a random variable X under a translation parameter appears in information theory in the classical proof of the Entropy-Power Inequality (EPI). It enters the proof of the EPI via the De-Bruijn identity, where it measures the variation of the differential entropy under a Gaussian perturbation, and via the convolution inequality J(X + Y) -1 ≥ J(X) -1 + J(Y) -1 (for independent X and Y), known as the Fisher Information Inequality (FII). The FII is proved in the literature directly, in a rather involved way. We give an alternative derivation of the FII, as a simple consequence of a {"}data-processing inequality{"} for the Cramer-Rao lower bound on parameter estimation.",
keywords = "Cramer-Rao bound, Data processing inequality, Entropy-power inequality, Fisher information, Linear modeling, Non-Gaussian noise, Prefiltering",
author = "Ram Zamir",
note = "Funding Information: Manuscript received June 1, 1995; revised October 1, 1997. This work was supported in part by the Wolfson Research Awards administered by the Israel Academy of Science and Humanities. The material in this correspondence was presented in part at the Information Theory Workshop on Multiple Access and Queuing, St. Louis, MO, April 1995.",
year = "1998",
doi = "10.1109/18.669301",
language = "English",
volume = "44",
pages = "1246--1250",
journal = "IEEE Transactions on Information Theory",
issn = "0018-9448",
publisher = "Institute of Electrical and Electronics Engineers Inc.",
number = "3",
}
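
For reference, a minimal LaTeX sketch of the two inequalities the abstract names, under the notation assumed there ($J$ is the Fisher information under a translation parameter; $X$ and $Y$ are independent; $\hat{\theta}$ is a hypothetical unbiased estimator introduced here only for illustration):

% Fisher Information Inequality (FII), as stated in the abstract:
\[
  \frac{1}{J(X+Y)} \;\ge\; \frac{1}{J(X)} + \frac{1}{J(Y)}
  \qquad \text{for independent } X, Y.
\]
% Cram{\'e}r-Rao lower bound on estimating a translation parameter \theta,
% the bound whose data-processing form the paper uses to derive the FII:
\[
  \operatorname{Var}\bigl(\hat{\theta}\bigr) \;\ge\; \frac{1}{J(X)}.
\]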