diff --git a/blueprint/src/sections/chernoff.tex b/blueprint/src/sections/chernoff.tex index 1aabe230..18f8bb6e 100644 --- a/blueprint/src/sections/chernoff.tex +++ b/blueprint/src/sections/chernoff.tex @@ -4,7 +4,7 @@ \section{Chernoff divergence} \label{def:Chernoff} \lean{ProbabilityTheory.chernoffDiv} \leanok - \uses{def:Renyi} + \uses{def:Renyi, lem:renyi_properties} The Chernoff divergence of order $\alpha > 0$ between two measures $\mu$ and $\nu$ on $\mathcal X$ is \begin{align*} C_\alpha(\mu, \nu) = \inf_{\xi \in \mathcal P(\mathcal X)}\max\{R_\alpha(\xi, \mu), R_\alpha(\xi, \nu)\} \: . @@ -28,6 +28,7 @@ \section{Chernoff divergence} This is $R_1 = \KL$ (by definition). \end{proof} + \begin{lemma}[Symmetry] \label{lem:chernoff_symm} %\lean{} @@ -40,6 +41,7 @@ \section{Chernoff divergence} Immediate from the definition. \end{proof} + \begin{lemma}[Monotonicity] \label{lem:chernoff_mono} %\lean{} @@ -53,6 +55,7 @@ \section{Chernoff divergence} Consequence of Lemma~\ref{lem:renyi_monotone}. \end{proof} + \begin{lemma} \label{lem:chernoff_eq_max_renyi} %\lean{} diff --git a/blueprint/src/sections/hellinger.tex b/blueprint/src/sections/hellinger.tex index ba33ed03..6195fab8 100644 --- a/blueprint/src/sections/hellinger.tex +++ b/blueprint/src/sections/hellinger.tex @@ -6,7 +6,7 @@ \section{Hellinger distance} \label{def:Hellinger} \lean{ProbabilityTheory.sqHellinger} \leanok - \uses{def:fDiv} + \uses{def:fDiv, lem:fDiv_properties} Let $\mu, \nu$ be two measures. The squared Hellinger distance between $\mu$ and $\nu$ is \begin{align*} \Hsq(\mu, \nu) = D_f(\mu, \nu) \quad \text{with } f: x \mapsto \frac{1}{2}\left( 1 - \sqrt{x} \right)^2 \: . 
diff --git a/blueprint/src/sections/renyi_divergence.tex b/blueprint/src/sections/renyi_divergence.tex index 925c5bc5..04e7cb68 100644 --- a/blueprint/src/sections/renyi_divergence.tex +++ b/blueprint/src/sections/renyi_divergence.tex @@ -158,6 +158,7 @@ \subsection{Properties inherited from f-divergences} The function $x \mapsto \frac{1}{\alpha - 1}\log (1 + (\alpha - 1)x)$ is non-decreasing and $D_f$ satisfies the DPI (Theorem~\ref{thm:fDiv_data_proc_2}), hence we get the DPI for $R_\alpha$. \end{proof} + \begin{lemma} \label{lem:renyi_data_proc_event} %\lean{} @@ -457,3 +458,39 @@ \subsection{Chain rule and tensorization} \begin{proof}%\leanok \uses{thm:renyi_prod} \end{proof} + + +% TODO: find a way to hide the following dummy lemma +\begin{lemma}[Dummy lemma: Rényi properties] + \label{lem:renyi_properties} + %\lean{} + \leanok + \uses{def:Renyi, def:condRenyi, def:renyiMeasure} + Dummy node to summarize properties of the Rényi divergence. +\end{lemma} + +\begin{proof}\leanok +\uses{ + lem:renyiDiv_zero, + lem:renyiDiv_eq_top_iff_mutuallySingular_of_lt_one, + lem:renyi_eq_log_integral, + lem:renyi_eq_log_integral', + lem:renyi_symm, + lem:renyi_cgf, + lem:renyi_cgf_2, + thm:renyi_data_proc, + lem:renyi_data_proc_event, + lem:renyiMeasure_ac, + lem:rnDeriv_renyiMeasure, + lem:kl_renyiMeasure_eq, + cor:renyi_eq_add_kl, + lem:renyi_eq_inf_add_kl, + lem:renyi_eq_inf_kl, + thm:renyi_chain_rule, + thm:renyi_compProd_bayesInv, + cor:renyi_prod_two, + thm:renyi_prod, + lem:renyi_prod_n, + thm:renyi_prod_countable +} +\end{proof} \ No newline at end of file