@inproceedings{a92b41b10d4447779e8b1dd0dac8b637,
  title     = {Equivalence of Empirical Risk Minimization to Regularization on the Family of {f-Divergences}},
  abstract  = {The solution to empirical risk minimization with f-divergence regularization (ERM-DR) is presented under mild conditions on f. Under such conditions, the optimal measure is shown to be unique. Examples of the solution for particular choices of the function f are presented. Previously known solutions to common regularization choices are obtained by leveraging the flexibility of the family of f-divergences. These include the unique solutions to empirical risk minimization with relative entropy regularization (Type-I and Type-II). The analysis of the solution unveils the following properties of f-divergences when used in the ERM-f DR problem: i) f-divergence regularization forces the support of the solution to coincide with the support of the reference measure, which introduces a strong inductive bias that dominates the evidence provided by the training data; and ii) any f-divergence regularization is equivalent to a different f-divergence regularization with an appropriate transformation of the empirical risk function.},
  keywords  = {Empirical risk minimization, regularization, statistical learning},
  author    = {Daunas, Francisco and Esnaola, I{\~n}aki and Perlaza, {Samir M.} and Poor, {H. Vincent}},
  note      = {Publisher Copyright: {\textcopyright} 2024 IEEE.; 2024 IEEE International Symposium on Information Theory, ISIT 2024 ; Conference date: 07-07-2024 Through 12-07-2024},
  year      = {2024},
  month     = jul,
  doi       = {10.1109/ISIT57864.2024.10619260},
  language  = {English (US)},
  series    = {IEEE International Symposium on Information Theory - Proceedings},
  publisher = {Institute of Electrical and Electronics Engineers Inc.},
  pages     = {759--764},
  booktitle = {2024 IEEE International Symposium on Information Theory, ISIT 2024 - Proceedings},
  address   = {United States},
}