Skip to content

Commit

Permalink
Merge pull request RemyDegenne#132 from RemyDegenne/blueprint
Browse files Browse the repository at this point in the history
Group nodes in the graph
  • Loading branch information
RemyDegenne authored Aug 29, 2024
2 parents 89932cd + 8e540e9 commit 7dbb831
Show file tree
Hide file tree
Showing 3 changed files with 42 additions and 2 deletions.
5 changes: 4 additions & 1 deletion blueprint/src/sections/chernoff.tex
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ \section{Chernoff divergence}
\label{def:Chernoff}
\lean{ProbabilityTheory.chernoffDiv}
\leanok
\uses{def:Renyi}
\uses{def:Renyi, lem:renyi_properties}
The Chernoff divergence of order $\alpha > 0$ between two measures $\mu$ and $\nu$ on $\mathcal X$ is
\begin{align*}
C_\alpha(\mu, \nu) = \inf_{\xi \in \mathcal P(\mathcal X)}\max\{R_\alpha(\xi, \mu), R_\alpha(\xi, \nu)\} \: .
Expand All @@ -28,6 +28,7 @@ \section{Chernoff divergence}
This is $R_1 = \KL$ (by definition).
\end{proof}


\begin{lemma}[Symmetry]
\label{lem:chernoff_symm}
%\lean{}
Expand All @@ -40,6 +41,7 @@ \section{Chernoff divergence}
Immediate from the definition.
\end{proof}


\begin{lemma}[Monotonicity]
\label{lem:chernoff_mono}
%\lean{}
Expand All @@ -53,6 +55,7 @@ \section{Chernoff divergence}
Consequence of Lemma~\ref{lem:renyi_monotone}.
\end{proof}


\begin{lemma}
\label{lem:chernoff_eq_max_renyi}
%\lean{}
Expand Down
2 changes: 1 addition & 1 deletion blueprint/src/sections/hellinger.tex
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ \section{Hellinger distance}
\label{def:Hellinger}
\lean{ProbabilityTheory.sqHellinger}
\leanok
\uses{def:fDiv}
\uses{def:fDiv, lem:fDiv_properties}
Let $\mu, \nu$ be two measures. The squared Hellinger distance between $\mu$ and $\nu$ is
\begin{align*}
\Hsq(\mu, \nu) = D_f(\mu, \nu) \quad \text{with } f: x \mapsto \frac{1}{2}\left( 1 - \sqrt{x} \right)^2 \: .
Expand Down
37 changes: 37 additions & 0 deletions blueprint/src/sections/renyi_divergence.tex
Original file line number Diff line number Diff line change
Expand Up @@ -158,6 +158,7 @@ \subsection{Properties inherited from f-divergences}
The function $x \mapsto \frac{1}{\alpha - 1}\log (1 + (\alpha - 1)x)$ is non-decreasing and $D_f$ satisfies the DPI (Theorem~\ref{thm:fDiv_data_proc_2}), hence we get the DPI for $R_\alpha$.
\end{proof}


\begin{lemma}
\label{lem:renyi_data_proc_event}
%\lean{}
Expand Down Expand Up @@ -457,3 +458,39 @@ \subsection{Chain rule and tensorization}
\begin{proof}%\leanok
\uses{thm:renyi_prod}
\end{proof}


% TODO: find a way to hide the following dummy lemma
\begin{lemma}[Dummy lemma: Rényi properties]
\label{lem:renyi_properties}
%\lean{}
\leanok
\uses{def:Renyi, def:condRenyi, def:renyiMeasure}
Dummy node to summarize properties of the Rényi divergence.
\end{lemma}

\begin{proof}\leanok
\uses{
lem:renyiDiv_zero,
lem:renyiDiv_eq_top_iff_mutuallySingular_of_lt_one,
lem:renyi_eq_log_integral,
lem:renyi_eq_log_integral',
lem:renyi_symm,
lem:renyi_cgf,
lem:renyi_cgf_2,
thm:renyi_data_proc,
lem:renyi_data_proc_event,
lem:renyiMeasure_ac,
lem:rnDeriv_renyiMeasure,
lem:kl_renyiMeasure_eq,
cor:renyi_eq_add_kl,
lem:renyi_eq_inf_add_kl,
lem:renyi_eq_inf_kl,
thm:renyi_chain_rule,
thm:renyi_compProd_bayesInv,
cor:renyi_prod_two,
thm:renyi_prod,
lem:renyi_prod_n,
thm:renyi_prod_countable
}
\end{proof}

0 comments on commit 7dbb831

Please sign in to comment.