blueprint: simple fDiv lemmas
RemyDegenne committed Mar 7, 2024
1 parent f421845 commit 5112422
Showing 3 changed files with 42 additions and 3 deletions.
39 changes: 39 additions & 0 deletions blueprint/src/sections/f_divergence.tex
@@ -38,6 +38,45 @@ \section{Definition and basic properties}
$\frac{d \mu}{d \mu}(x) = 1$ almost everywhere and $f(1) = 0$.
\end{proof}

\begin{lemma}
\label{lem:fDiv_mul}
%\lean{}
%\leanok
\uses{def:fDiv}
For all $a \ge 0$, $D_{a f}(\mu, \nu) = a D_{f}(\mu, \nu)$. (For $a < 0$ the equality can fail, for instance when $D_{f}(\mu, \nu) = \infty$.)
\end{lemma}

\begin{proof}
Linearity of the integral.
\end{proof}
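Written out with the integral form of Definition~\ref{def:fDiv} (a sketch, assuming that $f\left(\frac{d\mu}{d\nu}\right)$ is $\nu$-integrable), the statement is the equality
\[
D_{a f}(\mu, \nu)
= \int_{\mathcal X} a f\left(\frac{d\mu}{d\nu}\right) d\nu
= a \int_{\mathcal X} f\left(\frac{d\mu}{d\nu}\right) d\nu
= a D_{f}(\mu, \nu) \, .
\]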

\begin{lemma}
\label{lem:fDiv_linear}
%\lean{}
%\leanok
\uses{def:fDiv}
For finite measures $\mu$ and $\nu$ with $\mu(\mathcal X) = \nu(\mathcal X)$, $D_{x - 1}(\mu, \nu) = 0$.
\end{lemma}

\begin{proof}
Computation.
\end{proof}
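In the absolutely continuous case $\mu \ll \nu$ (a sketch; the general case follows from Definition~\ref{def:fDiv}), the computation is
\[
D_{x - 1}(\mu, \nu)
= \int_{\mathcal X} \left(\frac{d\mu}{d\nu} - 1\right) d\nu
= \mu(\mathcal X) - \nu(\mathcal X)
= 0 \, .
\]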

\begin{lemma}
\label{lem:fDiv_add_linear}
%\lean{}
%\leanok
\uses{def:fDiv}
For finite measures $\mu$ and $\nu$ with $\mu(\mathcal X) = \nu(\mathcal X)$, for all $a \in \mathbb{R}$, $D_{f + a(x - 1)}(\mu, \nu) = D_{f}(\mu, \nu)$.
\end{lemma}

This means that if $f$ is differentiable at $1$, we can always reduce to the case $f'(1) = 0$: replacing $f$ with $x \mapsto f(x) - f'(1)(x - 1)$ does not change the divergence.

\begin{proof}
\uses{lem:fDiv_add, lem:fDiv_mul, lem:fDiv_linear}
Linearity (Lemmas~\ref{lem:fDiv_add} and~\ref{lem:fDiv_mul}), then Lemma~\ref{lem:fDiv_linear}.
\end{proof}
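Spelled out, with $g : x \mapsto a (x - 1)$ and under the hypotheses of Lemmas~\ref{lem:fDiv_add} and~\ref{lem:fDiv_mul} (for $a < 0$, the middle step can be replaced by applying the computation of Lemma~\ref{lem:fDiv_linear} to $g$ directly), the chain of equalities is
\[
D_{f + g}(\mu, \nu)
= D_{f}(\mu, \nu) + D_{g}(\mu, \nu)
= D_{f}(\mu, \nu) + a D_{x - 1}(\mu, \nu)
= D_{f}(\mu, \nu) \, .
\]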

\section{Conditional f-divergence}

\begin{lemma}
2 changes: 1 addition & 1 deletion blueprint/src/sections/kl_divergence.tex
@@ -17,7 +17,7 @@ \chapter{Kullback-Leibler divergence}
%\lean{}
%\leanok
\uses{def:KL, def:fDiv}
- $\KL(\mu, \nu) = D_f(\mu, \nu)$ for $f: x \mapsto x \log x$ or $f: x \mapsto x \log x - x + 1$.
+ $\KL(\mu, \nu) = D_f(\mu, \nu)$ for $f: x \mapsto x \log x$ or, for probability measures, $f: x \mapsto x \log x - x + 1$.
\end{lemma}

\begin{proof}
4 changes: 2 additions & 2 deletions blueprint/src/sections/renyi_divergence.tex
@@ -16,7 +16,7 @@ \chapter{Rényi divergences}
%\lean{}
%\leanok
\uses{def:fDiv, def:Renyi}
- $R_\alpha(\mu, \nu) = \frac{1}{\alpha - 1} \log (1 + (\alpha - 1) D_f(\nu, \mu))$ for $f : x \mapsto \frac{1}{\alpha - 1}(x^{\alpha} - \alpha x + \alpha - 1)$. It is thus a monotone transformation of an f-divergence.
+ $R_\alpha(\mu, \nu) = \frac{1}{\alpha - 1} \log (1 + (\alpha - 1) D_f(\nu, \mu))$ for $f : x \mapsto \frac{1}{\alpha - 1}(x^{\alpha} - 1)$, which is convex with $f(1) = 0$. It is thus a monotone transformation of an f-divergence.

TODO: use this for the definition?
\end{lemma}
@@ -72,7 +72,7 @@ \chapter{Rényi divergences}
%\lean{}
%\leanok
\uses{def:Renyi}
- Let $\mu, \nu$ be two measures on $\mathcal X$. Let $n \in \mathbb{N}$ and write $\mu^{\otimes n}$ for the product measure on $\mathbb{R}^n$ of $n$ times $\mu$.
+ Let $\mu, \nu$ be two measures on $\mathcal X$. Let $n \in \mathbb{N}$ and write $\mu^{\otimes n}$ for the product measure on $\mathcal X^n$ of $n$ copies of $\mu$.
Then $R_\alpha(\mu^{\otimes n}, \nu^{\otimes n}) = n R_\alpha(\mu, \nu)$.
\end{corollary}
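For intuition, with the usual integral formula $R_\alpha(\mu, \nu) = \frac{1}{\alpha - 1} \log \int_{\mathcal X} \left(\frac{d\mu}{d\nu}\right)^{\alpha} d\nu$ (an assumed normalization; Definition~\ref{def:Renyi} may be phrased differently), the corollary follows from $\frac{d\mu^{\otimes n}}{d\nu^{\otimes n}}(x) = \prod_{i=1}^n \frac{d\mu}{d\nu}(x_i)$ and Tonelli's theorem:
\[
\int_{\mathcal X^n} \prod_{i=1}^n \left(\frac{d\mu}{d\nu}(x_i)\right)^{\alpha} d\nu^{\otimes n}(x)
= \left(\int_{\mathcal X} \left(\frac{d\mu}{d\nu}\right)^{\alpha} d\nu\right)^{n} ,
\]
so that taking $\frac{1}{\alpha - 1} \log$ of both sides gives $R_\alpha(\mu^{\otimes n}, \nu^{\otimes n}) = n R_\alpha(\mu, \nu)$.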
