src/HOL/Probability/Information.thy
author: hoelzl — Thu Jun 09 14:04:34 2011 +0200 (2011-06-09) — changeset 43340 (60e181c4eae4), parent 42148 (d596e7bb251f), child 43556 (0d78c8d31d0d), permissions -rw-r--r--
commit message: independence is equivalent to mutual information = 0
```     1 (*  Title:      HOL/Probability/Information.thy
```
```     2     Author:     Johannes Hölzl, TU München
```
```     3     Author:     Armin Heller, TU München
```
```     4 *)
```
```     5
```
```     6 header {*Information theory*}
```
```     7
```
```     8 theory Information
```
```     9 imports
```
```    10   Independent_Family
```
```    11   "~~/src/HOL/Library/Convex"
```
```    12 begin
```
```    13
```
```    (* Monotonicity of log for a base greater than 1, via the cancel-iff rule. *)
```
```    14 lemma log_le: "1 < a \<Longrightarrow> 0 < x \<Longrightarrow> x \<le> y \<Longrightarrow> log a x \<le> log a y"
```
```    15   by (subst log_le_cancel_iff) auto
```
```    16
```
```    (* Strict monotonicity of log for a base greater than 1 — strict analogue of log_le. *)
```
```    17 lemma log_less: "1 < a \<Longrightarrow> 0 < x \<Longrightarrow> x < y \<Longrightarrow> log a x < log a y"
```
```    18   by (subst log_less_cancel_iff) auto
```
```    19
```
```    (* Curried form of summation over a product set: sum over A \<times> B as an iterated sum. *)
```
```    20 lemma setsum_cartesian_product':
```
```    21   "(\<Sum>x\<in>A \<times> B. f x) = (\<Sum>x\<in>A. setsum (\<lambda>y. f (x, y)) B)"
```
```    22   unfolding setsum_cartesian_product by simp
```
```    23
```
```    24 section "Convex theory"
```
```    25
```
```    (* Jensen's inequality for log: with convex weights a over a finite nonempty index
```
```
```
```       set and strictly positive values y, the log of the weighted sum dominates the
```
```
```
```       weighted sum of logs.  Reduced to convexity of - log b via convex_on_setsum. *)
```
```
```
```    26 lemma log_setsum:
```
```    27   assumes "finite s" "s \<noteq> {}"
```
```    28   assumes "b > 1"
```
```    29   assumes "(\<Sum> i \<in> s. a i) = 1"
```
```    30   assumes "\<And> i. i \<in> s \<Longrightarrow> a i \<ge> 0"
```
```    31   assumes "\<And> i. i \<in> s \<Longrightarrow> y i \<in> {0 <..}"
```
```    32   shows "log b (\<Sum> i \<in> s. a i * y i) \<ge> (\<Sum> i \<in> s. a i * log b (y i))"
```
```    33 proof -
```
```    34   have "convex_on {0 <..} (\<lambda> x. - log b x)"
```
```    35     by (rule minus_log_convex[OF `b > 1`])
```
```    36   hence "- log b (\<Sum> i \<in> s. a i * y i) \<le> (\<Sum> i \<in> s. a i * - log b (y i))"
```
```    37     using convex_on_setsum[of _ _ "\<lambda> x. - log b x"] assms pos_is_convex by fastsimp
```
```    38   thus ?thesis by (auto simp add:setsum_negf le_imp_neg_le)
```
```    39 qed
```
```    40
```
```    (* Refinement of log_setsum: y i need only be positive where the weight a i is
```
```
```
```       nonzero.  Proved by restricting the sums to s - {i. a i = 0} (which does not
```
```
```
```       change either side) and applying log_setsum there. *)
```
```
```
```    41 lemma log_setsum':
```
```    42   assumes "finite s" "s \<noteq> {}"
```
```    43   assumes "b > 1"
```
```    44   assumes "(\<Sum> i \<in> s. a i) = 1"
```
```    45   assumes pos: "\<And> i. i \<in> s \<Longrightarrow> 0 \<le> a i"
```
```    46           "\<And> i. \<lbrakk> i \<in> s ; 0 < a i \<rbrakk> \<Longrightarrow> 0 < y i"
```
```    47   shows "log b (\<Sum> i \<in> s. a i * y i) \<ge> (\<Sum> i \<in> s. a i * log b (y i))"
```
```    48 proof -
```
```    49   have "\<And>y. (\<Sum> i \<in> s - {i. a i = 0}. a i * y i) = (\<Sum> i \<in> s. a i * y i)"
```
```    50     using assms by (auto intro!: setsum_mono_zero_cong_left)
```
```    51   moreover have "log b (\<Sum> i \<in> s - {i. a i = 0}. a i * y i) \<ge> (\<Sum> i \<in> s - {i. a i = 0}. a i * log b (y i))"
```
```    52   proof (rule log_setsum)
```
```    53     have "setsum a (s - {i. a i = 0}) = setsum a s"
```
```    54       using assms(1) by (rule setsum_mono_zero_cong_left) auto
```
```    55     thus sum_1: "setsum a (s - {i. a i = 0}) = 1"
```
```    56       "finite (s - {i. a i = 0})" using assms by simp_all
```
```    57
```
```
```    (* The restricted index set is nonempty since its weights still sum to 1. *)
```
```    58     show "s - {i. a i = 0} \<noteq> {}"
```
```    59     proof
```
```    60       assume *: "s - {i. a i = 0} = {}"
```
```    61       hence "setsum a (s - {i. a i = 0}) = 0" by (simp add: * setsum_empty)
```
```    62       with sum_1 show False by simp
```
```    63     qed
```
```    64
```
```
```    65     fix i assume "i \<in> s - {i. a i = 0}"
```
```    66     hence "i \<in> s" "a i \<noteq> 0" by simp_all
```
```    67     thus "0 \<le> a i" "y i \<in> {0<..}" using pos[of i] by auto
```
```    68   qed fact+
```
```    69   ultimately show ?thesis by simp
```
```    70 qed
```
```    71
```
```    (* The log-sum inequality: for a probability vector g and nonnegative f with
```
```
```
```       f x > 0 wherever g x > 0, we have - sum g x * log (g x / f x) \<le> log (sum f).
```
```
```
```       This is the key analytic fact behind nonnegativity of KL divergence. *)
```
```
```
```    72 lemma log_setsum_divide:
```
```    73   assumes "finite S" and "S \<noteq> {}" and "1 < b"
```
```    74   assumes "(\<Sum>x\<in>S. g x) = 1"
```
```    75   assumes pos: "\<And>x. x \<in> S \<Longrightarrow> g x \<ge> 0" "\<And>x. x \<in> S \<Longrightarrow> f x \<ge> 0"
```
```    76   assumes g_pos: "\<And>x. \<lbrakk> x \<in> S ; 0 < g x \<rbrakk> \<Longrightarrow> 0 < f x"
```
```    77   shows "- (\<Sum>x\<in>S. g x * log b (g x / f x)) \<le> log b (\<Sum>x\<in>S. f x)"
```
```    78 proof -
```
```    79   have log_mono: "\<And>x y. 0 < x \<Longrightarrow> x \<le> y \<Longrightarrow> log b x \<le> log b y"
```
```    80     using `1 < b` by (subst log_le_cancel_iff) auto
```
```    81
```
```
```    (* Step 1: push the minus sign inside, flipping each quotient g/f to f/g. *)
```
```    82   have "- (\<Sum>x\<in>S. g x * log b (g x / f x)) = (\<Sum>x\<in>S. g x * log b (f x / g x))"
```
```    83   proof (unfold setsum_negf[symmetric], rule setsum_cong)
```
```    84     fix x assume x: "x \<in> S"
```
```    85     show "- (g x * log b (g x / f x)) = g x * log b (f x / g x)"
```
```    86     proof (cases "g x = 0")
```
```    87       case False
```
```    88       with pos[OF x] g_pos[OF x] have "0 < f x" "0 < g x" by simp_all
```
```    89       thus ?thesis using `1 < b` by (simp add: log_divide field_simps)
```
```    90     qed simp
```
```    91   qed rule
```
```    (* Step 2: apply Jensen (log_setsum') with weights g and values f/g. *)
```
```    92   also have "... \<le> log b (\<Sum>x\<in>S. g x * (f x / g x))"
```
```    93   proof (rule log_setsum')
```
```    94     fix x assume x: "x \<in> S" "0 < g x"
```
```    95     with g_pos[OF x] show "0 < f x / g x" by (safe intro!: divide_pos_pos)
```
```    96   qed fact+
```
```    (* Step 3: g x * (f x / g x) collapses to f x off the zero set of g,
```
```
```
```       and dropping that zero set only shrinks the sum of the nonnegative f. *)
```
```    97   also have "... = log b (\<Sum>x\<in>S - {x. g x = 0}. f x)" using `finite S`
```
```    98     by (auto intro!: setsum_mono_zero_cong_right arg_cong[where f="log b"]
```
```    99         split: split_if_asm)
```
```   100   also have "... \<le> log b (\<Sum>x\<in>S. f x)"
```
```   101   proof (rule log_mono)
```
```   102     have "0 = (\<Sum>x\<in>S - {x. g x = 0}. 0)" by simp
```
```   103     also have "... < (\<Sum>x\<in>S - {x. g x = 0}. f x)" (is "_ < ?sum")
```
```   104     proof (rule setsum_strict_mono)
```
```   105       show "finite (S - {x. g x = 0})" using `finite S` by simp
```
```   106       show "S - {x. g x = 0} \<noteq> {}"
```
```   107       proof
```
```   108         assume "S - {x. g x = 0} = {}"
```
```   109         hence "(\<Sum>x\<in>S. g x) = 0" by (subst setsum_0') auto
```
```   110         with `(\<Sum>x\<in>S. g x) = 1` show False by simp
```
```   111       qed
```
```   112       fix x assume "x \<in> S - {x. g x = 0}"
```
```   113       thus "0 < f x" using g_pos[of x] pos(1)[of x] by auto
```
```   114     qed
```
```   115     finally show "0 < ?sum" .
```
```   116     show "(\<Sum>x\<in>S - {x. g x = 0}. f x) \<le> (\<Sum>x\<in>S. f x)"
```
```   117       using `finite S` pos by (auto intro!: setsum_mono2)
```
```   118   qed
```
```   119   finally show ?thesis .
```
```   120 qed
```
```   121
```
```    (* Rewriting pair equalities into componentwise fst/snd equations, in both orientations. *)
```
```   122 lemma split_pairs:
```
```   123   "((A, B) = X) \<longleftrightarrow> (fst X = A \<and> snd X = B)" and
```
```   124   "(X = (A, B)) \<longleftrightarrow> (fst X = A \<and> snd X = B)" by auto
```
```   125
```
```   126 section "Information theory"
```
```   127
```
```    (* An information space is a probability space together with a fixed logarithm base b > 1. *)
```
```   128 locale information_space = prob_space +
```
```   129   fixes b :: real assumes b_gt_1: "1 < b"
```
```   130
```
```   131 context information_space
```
```   132 begin
```
```   133
```
```   134 text {* Introduce some simplification rules for logarithm of base @{term b}. *}
```
```   135
```
```    (* For nonpositive arguments, log b is the (underspecified) constant log b 0:
```
```
```
```       no exp u can equal x \<le> 0, so ln's definite-description definition collapses. *)
```
```
```
```   136 lemma log_neg_const:
```
```   137   assumes "x \<le> 0"
```
```   138   shows "log b x = log b 0"
```
```   139 proof -
```
```   140   { fix u :: real
```
```   141     have "x \<le> 0" by fact
```
```   142     also have "0 < exp u"
```
```   143       using exp_gt_zero .
```
```   144     finally have "exp u \<noteq> x"
```
```   145       by auto }
```
```   146   then show "log b x = log b 0"
```
```   147     by (simp add: log_def ln_def)
```
```   148 qed
```
```   149
```
```    (* Unconditional product rule for log: valid for all reals by absorbing signs
```
```
```
```       with absolute values and falling back to log b 0 when A * B \<le> 0. *)
```
```
```
```   150 lemma log_mult_eq:
```
```   151   "log b (A * B) = (if 0 < A * B then log b \<bar>A\<bar> + log b \<bar>B\<bar> else log b 0)"
```
```   152   using log_mult[of b "\<bar>A\<bar>" "\<bar>B\<bar>"] b_gt_1 log_neg_const[of "A * B"]
```
```   153   by (auto simp: zero_less_mult_iff mult_le_0_iff)
```
```   154
```
```    (* Unconditional inverse rule for log, with the log b 0 fallback for B \<le> 0. *)
```
```   155 lemma log_inverse_eq:
```
```   156   "log b (inverse B) = (if 0 < B then - log b B else log b 0)"
```
```   157   using log_inverse[of b B] log_neg_const[of "inverse B"] b_gt_1 by simp
```
```   158
```
```    (* Unconditional quotient rule for log, derived from the product and inverse rules. *)
```
```   159 lemma log_divide_eq:
```
```   160   "log b (A / B) = (if 0 < A * B then log b \<bar>A\<bar> - log b \<bar>B\<bar> else log b 0)"
```
```   161   unfolding divide_inverse log_mult_eq log_inverse_eq abs_inverse
```
```   162   by (auto simp: zero_less_mult_iff mult_le_0_iff)
```
```   163
```
```    (* Bundle the three unconditional log rewrites for use as a simp set. *)
```
```   164 lemmas log_simps = log_mult_eq log_inverse_eq log_divide_eq
```
```   165
```
```   166 end
```
```   167
```
```   168 subsection "Kullback$-$Leibler divergence"
```
```   169
```
```   170 text {* The Kullback$-$Leibler divergence is also known as relative entropy or
```
```   171 Kullback$-$Leibler distance. *}
```
```   172
```
```    (* Pointwise entropy density: log (base b) of the real part of the Radon-Nikodym
```
```
```
```       derivative of \<nu> with respect to the measure of M. *)
```
```
```
```   173 definition
```
```   174   "entropy_density b M \<nu> = log b \<circ> real \<circ> RN_deriv M \<nu>"
```
```   175
```
```    (* KL divergence: the Lebesgue integral of the entropy density taken against \<nu>
```
```
```
```       (i.e. against M with its measure field replaced by \<nu>). *)
```
```
```
```   176 definition
```
```   177   "KL_divergence b M \<nu> = integral\<^isup>L (M\<lparr>measure := \<nu>\<rparr>) (entropy_density b M \<nu>)"
```
```   178
```
```    (* Borel measurability of the entropy density: composition of measurable maps,
```
```
```
```       using measurability of RN_deriv (needs absolute continuity) and of log. *)
```
```
```
```   179 lemma (in information_space) measurable_entropy_density:
```
```   180   assumes ps: "prob_space (M\<lparr>measure := \<nu>\<rparr>)"
```
```   181   assumes ac: "absolutely_continuous \<nu>"
```
```   182   shows "entropy_density b M \<nu> \<in> borel_measurable M"
```
```   183 proof -
```
```   184   interpret \<nu>: prob_space "M\<lparr>measure := \<nu>\<rparr>" by fact
```
```   185   have "measure_space (M\<lparr>measure := \<nu>\<rparr>)" by fact
```
```   186   from RN_deriv[OF this ac] b_gt_1 show ?thesis
```
```   187     unfolding entropy_density_def
```
```   188     by (intro measurable_comp) auto
```
```   189 qed
```
```   190
```
```    (* Strict positivity of KL divergence: if \<nu> is an absolutely continuous probability
```
```
```
```       measure that differs from \<mu> on some measurable set A, then KL b M \<nu> > 0.
```
```
```
```       Core idea: with D the real-valued RN derivative, compare the integral of
```
```
```
```       D - indicator {D \<noteq> 0} against \<integral> D * ln b * entropy_density, using the strict
```
```
```
```       inequality x - 1 < - ln (1/x) off the set where D is 0 or 1 (which must have
```
```
```
```       positive measure, else \<nu> = \<mu>). *)
```
```
```
```   191 lemma (in information_space) KL_gt_0:
```
```   192   assumes ps: "prob_space (M\<lparr>measure := \<nu>\<rparr>)"
```
```   193   assumes ac: "absolutely_continuous \<nu>"
```
```   194   assumes int: "integrable (M\<lparr> measure := \<nu> \<rparr>) (entropy_density b M \<nu>)"
```
```   195   assumes A: "A \<in> sets M" "\<nu> A \<noteq> \<mu> A"
```
```   196   shows "0 < KL_divergence b M \<nu>"
```
```   197 proof -
```
```   198   interpret \<nu>: prob_space "M\<lparr>measure := \<nu>\<rparr>" by fact
```
```   199   have ms: "measure_space (M\<lparr>measure := \<nu>\<rparr>)" by default
```
```   200   have fms: "finite_measure (M\<lparr>measure := \<nu>\<rparr>)" by default
```
```   201   note RN = RN_deriv[OF ms ac]
```
```   202
```
```
```    (* Obtain a real-valued version D of the RN derivative, a.e. equal to it. *)
```
```   203   from real_RN_deriv[OF fms ac] guess D . note D = this
```
```   204   with absolutely_continuous_AE[OF ms] ac
```
```   205   have D\<nu>: "AE x in M\<lparr>measure := \<nu>\<rparr>. RN_deriv M \<nu> x = extreal (D x)"
```
```   206     by auto
```
```   207
```
```
```   208   def f \<equiv> "\<lambda>x. if D x = 0 then 1 else 1 / D x"
```
```   209   with D have f_borel: "f \<in> borel_measurable M"
```
```   210     by (auto intro!: measurable_If)
```
```   211
```
```
```   212   have "KL_divergence b M \<nu> = 1 / ln b * (\<integral> x. ln b * entropy_density b M \<nu> x \<partial>M\<lparr>measure := \<nu>\<rparr>)"
```
```   213     unfolding KL_divergence_def using int b_gt_1
```
```   214     by (simp add: integral_cmult)
```
```   215
```
```
```    (* D is a density for \<nu> with respect to \<mu>. *)
```
```   216   { fix A assume "A \<in> sets M"
```
```   217     with RN D have "\<nu>.\<mu> A = (\<integral>\<^isup>+ x. extreal (D x) * indicator A x \<partial>M)"
```
```   218       by (auto intro!: positive_integral_cong_AE) }
```
```   219   note D_density = this
```
```   220
```
```
```   221   have ln_entropy: "(\<lambda>x. ln b * entropy_density b M \<nu> x) \<in> borel_measurable M"
```
```   222     using measurable_entropy_density[OF ps ac] by auto
```
```   223
```
```
```    (* Transfer integrability from \<nu> to the density-weighted integral over M. *)
```
```   224   have "integrable (M\<lparr>measure := \<nu>\<rparr>) (\<lambda>x. ln b * entropy_density b M \<nu> x)"
```
```   225     using int by auto
```
```   226   moreover have "integrable (M\<lparr>measure := \<nu>\<rparr>) (\<lambda>x. ln b * entropy_density b M \<nu> x) \<longleftrightarrow>
```
```   227       integrable M (\<lambda>x. D x * (ln b * entropy_density b M \<nu> x))"
```
```   228     using D D_density ln_entropy
```
```   229     by (intro integral_translated_density) auto
```
```   230   ultimately have M_int: "integrable M (\<lambda>x. D x * (ln b * entropy_density b M \<nu> x))"
```
```   231     by simp
```
```   232
```
```
```   233   have D_neg: "(\<integral>\<^isup>+ x. extreal (- D x) \<partial>M) = 0"
```
```   234     using D by (subst positive_integral_0_iff_AE) auto
```
```   235
```
```
```    (* D integrates to \<nu> (space M) = 1, since \<nu> is a probability measure. *)
```
```   236   have "(\<integral>\<^isup>+ x. extreal (D x) \<partial>M) = \<nu> (space M)"
```
```   237     using RN D by (auto intro!: positive_integral_cong_AE)
```
```   238   then have D_pos: "(\<integral>\<^isup>+ x. extreal (D x) \<partial>M) = 1"
```
```   239     using \<nu>.measure_space_1 by simp
```
```   240
```
```
```   241   have "integrable M D"
```
```   242     using D_pos D_neg D by (auto simp: integrable_def)
```
```   243
```
```
```   244   have "integral\<^isup>L M D = 1"
```
```   245     using D_pos D_neg by (auto simp: lebesgue_integral_def)
```
```   246
```
```
```   247   let ?D_set = "{x\<in>space M. D x \<noteq> 0}"
```
```   248   have [simp, intro]: "?D_set \<in> sets M"
```
```   249     using D by (auto intro: sets_Collect)
```
```   250
```
```
```    (* Chain: 0 \<le> 1 - prob ?D_set = \<integral> (D - indicator ?D_set) < \<integral> D * ln b * entropy density. *)
```
```   251   have "0 \<le> 1 - \<mu>' ?D_set"
```
```   252     using prob_le_1 by (auto simp: field_simps)
```
```   253   also have "\<dots> = (\<integral> x. D x - indicator ?D_set x \<partial>M)"
```
```   254     using `integrable M D` `integral\<^isup>L M D = 1`
```
```   255     by (simp add: \<mu>'_def)
```
```   256   also have "\<dots> < (\<integral> x. D x * (ln b * entropy_density b M \<nu> x) \<partial>M)"
```
```   257   proof (rule integral_less_AE)
```
```   258     show "integrable M (\<lambda>x. D x - indicator ?D_set x)"
```
```   259       using `integrable M D`
```
```   260       by (intro integral_diff integral_indicator) auto
```
```   261   next
```
```   262     show "integrable M (\<lambda>x. D x * (ln b * entropy_density b M \<nu> x))"
```
```   263       by fact
```
```   264   next
```
```    (* The set where D avoids both 0 and 1 has positive measure; otherwise D = 1 a.e.
```
```
```
```       and \<nu> would agree with \<mu> on A, contradicting the assumption. *)
```
```   265     show "\<mu> {x\<in>space M. D x \<noteq> 1 \<and> D x \<noteq> 0} \<noteq> 0"
```
```   266     proof
```
```   267       assume eq_0: "\<mu> {x\<in>space M. D x \<noteq> 1 \<and> D x \<noteq> 0} = 0"
```
```   268       then have disj: "AE x. D x = 1 \<or> D x = 0"
```
```   269         using D(1) by (auto intro!: AE_I[OF subset_refl] sets_Collect)
```
```   270
```
```
```   271       have "\<mu> {x\<in>space M. D x = 1} = (\<integral>\<^isup>+ x. indicator {x\<in>space M. D x = 1} x \<partial>M)"
```
```   272         using D(1) by auto
```
```   273       also have "\<dots> = (\<integral>\<^isup>+ x. extreal (D x) * indicator {x\<in>space M. D x \<noteq> 0} x \<partial>M)"
```
```   274         using disj by (auto intro!: positive_integral_cong_AE simp: indicator_def one_extreal_def)
```
```   275       also have "\<dots> = \<nu> {x\<in>space M. D x \<noteq> 0}"
```
```   276         using D(1) D_density by auto
```
```   277       also have "\<dots> = \<nu> (space M)"
```
```   278         using D_density D(1) by (auto intro!: positive_integral_cong simp: indicator_def)
```
```   279       finally have "AE x. D x = 1"
```
```   280         using D(1) \<nu>.measure_space_1 by (intro AE_I_eq_1) auto
```
```   281       then have "(\<integral>\<^isup>+x. indicator A x\<partial>M) = (\<integral>\<^isup>+x. extreal (D x) * indicator A x\<partial>M)"
```
```   282         by (intro positive_integral_cong_AE) (auto simp: one_extreal_def[symmetric])
```
```   283       also have "\<dots> = \<nu> A"
```
```   284         using `A \<in> sets M` D_density by simp
```
```   285       finally show False using `A \<in> sets M` `\<nu> A \<noteq> \<mu> A` by simp
```
```   286     qed
```
```   287     show "{x\<in>space M. D x \<noteq> 1 \<and> D x \<noteq> 0} \<in> sets M"
```
```   288       using D(1) by (auto intro: sets_Collect)
```
```   289
```
```
```    (* Strict inequality pointwise on that set, via ln x = x - 1 only at x = 1. *)
```
```   290     show "AE t. t \<in> {x\<in>space M. D x \<noteq> 1 \<and> D x \<noteq> 0} \<longrightarrow>
```
```   291       D t - indicator ?D_set t \<noteq> D t * (ln b * entropy_density b M \<nu> t)"
```
```   292       using D(2)
```
```   293     proof (elim AE_mp, safe intro!: AE_I2)
```
```   294       fix t assume Dt: "t \<in> space M" "D t \<noteq> 1" "D t \<noteq> 0"
```
```   295         and RN: "RN_deriv M \<nu> t = extreal (D t)"
```
```   296         and eq: "D t - indicator ?D_set t = D t * (ln b * entropy_density b M \<nu> t)"
```
```   297
```
```
```   298       have "D t - 1 = D t - indicator ?D_set t"
```
```   299         using Dt by simp
```
```   300       also note eq
```
```   301       also have "D t * (ln b * entropy_density b M \<nu> t) = - D t * ln (1 / D t)"
```
```   302         using RN b_gt_1 `D t \<noteq> 0` `0 \<le> D t`
```
```   303         by (simp add: entropy_density_def log_def ln_div less_le)
```
```   304       finally have "ln (1 / D t) = 1 / D t - 1"
```
```   305         using `D t \<noteq> 0` by (auto simp: field_simps)
```
```   306       from ln_eq_minus_one[OF _ this] `D t \<noteq> 0` `0 \<le> D t` `D t \<noteq> 1`
```
```   307       show False by auto
```
```   308     qed
```
```   309
```
```
```    (* Weak inequality everywhere, via ln x \<le> x - 1. *)
```
```   310     show "AE t. D t - indicator ?D_set t \<le> D t * (ln b * entropy_density b M \<nu> t)"
```
```   311       using D(2)
```
```   312     proof (elim AE_mp, intro AE_I2 impI)
```
```   313       fix t assume "t \<in> space M" and RN: "RN_deriv M \<nu> t = extreal (D t)"
```
```   314       show "D t - indicator ?D_set t \<le> D t * (ln b * entropy_density b M \<nu> t)"
```
```   315       proof cases
```
```   316         assume asm: "D t \<noteq> 0"
```
```   317         then have "0 < D t" using `0 \<le> D t` by auto
```
```   318         then have "0 < 1 / D t" by auto
```
```   319         have "D t - indicator ?D_set t \<le> - D t * (1 / D t - 1)"
```
```   320           using asm `t \<in> space M` by (simp add: field_simps)
```
```   321         also have "- D t * (1 / D t - 1) \<le> - D t * ln (1 / D t)"
```
```   322           using ln_le_minus_one `0 < 1 / D t` by (intro mult_left_mono_neg) auto
```
```   323         also have "\<dots> = D t * (ln b * entropy_density b M \<nu> t)"
```
```   324           using `0 < D t` RN b_gt_1
```
```   325           by (simp_all add: log_def ln_div entropy_density_def)
```
```   326         finally show ?thesis by simp
```
```   327       qed simp
```
```   328     qed
```
```   329   qed
```
```    (* Translate the density-weighted integral back to an integral against \<nu>. *)
```
```   330   also have "\<dots> = (\<integral> x. ln b * entropy_density b M \<nu> x \<partial>M\<lparr>measure := \<nu>\<rparr>)"
```
```   331     using D D_density ln_entropy
```
```   332     by (intro integral_translated_density[symmetric]) auto
```
```   333   also have "\<dots> = ln b * (\<integral> x. entropy_density b M \<nu> x \<partial>M\<lparr>measure := \<nu>\<rparr>)"
```
```   334     using int by (rule \<nu>.integral_cmult)
```
```   335   finally show "0 < KL_divergence b M \<nu>"
```
```   336     using b_gt_1 by (auto simp: KL_divergence_def zero_less_mult_iff)
```
```   337 qed
```
```   338
```
```    (* If \<nu> coincides with the measure of M on all measurable sets, the RN derivative
```
```
```
```       is a.e. 1, so the entropy density is a.e. log b 1 = 0 and the KL divergence vanishes. *)
```
```
```
```   339 lemma (in sigma_finite_measure) KL_eq_0:
```
```   340   assumes eq: "\<forall>A\<in>sets M. \<nu> A = measure M A"
```
```   341   shows "KL_divergence b M \<nu> = 0"
```
```   342 proof -
```
```   343   have "AE x. 1 = RN_deriv M \<nu> x"
```
```   344   proof (rule RN_deriv_unique)
```
```   345     show "measure_space (M\<lparr>measure := \<nu>\<rparr>)"
```
```   346       using eq by (intro measure_space_cong) auto
```
```   347     show "absolutely_continuous \<nu>"
```
```   348       unfolding absolutely_continuous_def using eq by auto
```
```   349     show "(\<lambda>x. 1) \<in> borel_measurable M" "AE x. 0 \<le> (1 :: extreal)" by auto
```
```   350     fix A assume "A \<in> sets M"
```
```   351     with eq show "\<nu> A = \<integral>\<^isup>+ x. 1 * indicator A x \<partial>M" by simp
```
```   352   qed
```
```   353   then have "AE x. log b (real (RN_deriv M \<nu> x)) = 0"
```
```   354     by (elim AE_mp) simp
```
```   355   from integral_cong_AE[OF this]
```
```   356   have "integral\<^isup>L M (entropy_density b M \<nu>) = 0"
```
```   357     by (simp add: entropy_density_def comp_def)
```
```   358   with eq show "KL_divergence b M \<nu> = 0"
```
```   359     unfolding KL_divergence_def
```
```   360     by (subst integral_cong_measure) auto
```
```   361 qed
```
```   362
```
```    (* Converse direction: vanishing KL divergence forces \<nu> = \<mu> on all measurable sets —
```
```
```
```       immediate contrapositive of the strict positivity lemma KL_gt_0. *)
```
```
```
```   363 lemma (in information_space) KL_eq_0_imp:
```
```   364   assumes ps: "prob_space (M\<lparr>measure := \<nu>\<rparr>)"
```
```   365   assumes ac: "absolutely_continuous \<nu>"
```
```   366   assumes int: "integrable (M\<lparr> measure := \<nu> \<rparr>) (entropy_density b M \<nu>)"
```
```   367   assumes KL: "KL_divergence b M \<nu> = 0"
```
```   368   shows "\<forall>A\<in>sets M. \<nu> A = \<mu> A"
```
```   369   by (metis less_imp_neq KL_gt_0 assms)
```
```   370
```
```    (* Nonnegativity of KL divergence: either \<nu> = \<mu> everywhere (then KL = 0 by KL_eq_0)
```
```
```
```       or they differ somewhere (then KL > 0 by KL_gt_0). *)
```
```
```
```   371 lemma (in information_space) KL_ge_0:
```
```   372   assumes ps: "prob_space (M\<lparr>measure := \<nu>\<rparr>)"
```
```   373   assumes ac: "absolutely_continuous \<nu>"
```
```   374   assumes int: "integrable (M\<lparr> measure := \<nu> \<rparr>) (entropy_density b M \<nu>)"
```
```   375   shows "0 \<le> KL_divergence b M \<nu>"
```
```   376   using KL_eq_0 KL_gt_0[OF ps ac int]
```
```   377   by (cases "\<forall>A\<in>sets M. \<nu> A = measure M A") (auto simp: le_less)
```
```   378
```
```   379
```
```    (* Invariance of KL divergence under a measure-preserving bijection T (with
```
```
```
```       measure-preserving inverse T'): KL on M' with \<nu>' equals KL on M with \<nu>.
```
```
```
```       Uses RN_deriv_vimage to transport the RN derivative along T. *)
```
```
```
```   380 lemma (in sigma_finite_measure) KL_divergence_vimage:
```
```   381   assumes T: "T \<in> measure_preserving M M'"
```
```   382     and T': "T' \<in> measure_preserving (M'\<lparr> measure := \<nu>' \<rparr>) (M\<lparr> measure := \<nu> \<rparr>)"
```
```   383     and inv: "\<And>x. x \<in> space M \<Longrightarrow> T' (T x) = x"
```
```   384     and inv': "\<And>x. x \<in> space M' \<Longrightarrow> T (T' x) = x"
```
```   385   and \<nu>': "measure_space (M'\<lparr>measure := \<nu>'\<rparr>)" "measure_space.absolutely_continuous M' \<nu>'"
```
```   386   and "1 < b"
```
```   387   shows "KL_divergence b M' \<nu>' = KL_divergence b M \<nu>"
```
```   388 proof -
```
```   389   interpret \<nu>': measure_space "M'\<lparr>measure := \<nu>'\<rparr>" by fact
```
```   390   have M: "measure_space (M\<lparr> measure := \<nu>\<rparr>)"
```
```   391     by (rule \<nu>'.measure_space_vimage[OF _ T'], simp) default
```
```   392   have "sigma_algebra (M'\<lparr> measure := \<nu>'\<rparr>)" by default
```
```   393   then have saM': "sigma_algebra M'" by simp
```
```   394   then interpret M': measure_space M' by (rule measure_space_vimage) fact
```
```    (* Pull absolute continuity of \<nu>' back to \<nu> along the inverse images. *)
```
```   395   have ac: "absolutely_continuous \<nu>" unfolding absolutely_continuous_def
```
```   396   proof safe
```
```   397     fix N assume N: "N \<in> sets M" and N_0: "\<mu> N = 0"
```
```   398     then have N': "T' -` N \<inter> space M' \<in> sets M'"
```
```   399       using T' by (auto simp: measurable_def measure_preserving_def)
```
```   400     have "T -` (T' -` N \<inter> space M') \<inter> space M = N"
```
```   401       using inv T N sets_into_space[OF N] by (auto simp: measurable_def measure_preserving_def)
```
```   402     then have "measure M' (T' -` N \<inter> space M') = 0"
```
```   403       using measure_preservingD[OF T N'] N_0 by auto
```
```   404     with \<nu>'(2) N' show "\<nu> N = 0" using measure_preservingD[OF T', of N] N
```
```   405       unfolding M'.absolutely_continuous_def measurable_def by auto
```
```   406   qed
```
```   407
```
```
```   408   have sa: "sigma_algebra (M\<lparr>measure := \<nu>\<rparr>)" by simp default
```
```   409   have AE: "AE x. RN_deriv M' \<nu>' (T x) = RN_deriv M \<nu> x"
```
```   410     by (rule RN_deriv_vimage[OF T T' inv \<nu>'])
```
```   411   show ?thesis
```
```   412     unfolding KL_divergence_def entropy_density_def comp_def
```
```   413   proof (subst \<nu>'.integral_vimage[OF sa T'])
```
```   414     show "(\<lambda>x. log b (real (RN_deriv M \<nu> x))) \<in> borel_measurable (M\<lparr>measure := \<nu>\<rparr>)"
```
```   415       by (auto intro!: RN_deriv[OF M ac] borel_measurable_log[OF _ `1 < b`])
```
```    (* Rewrite the M' integral through T \<circ> T' = id and the a.e. identity of RN derivatives. *)
```
```   416     have "(\<integral> x. log b (real (RN_deriv M' \<nu>' x)) \<partial>M'\<lparr>measure := \<nu>'\<rparr>) =
```
```   417       (\<integral> x. log b (real (RN_deriv M' \<nu>' (T (T' x)))) \<partial>M'\<lparr>measure := \<nu>'\<rparr>)" (is "?l = _")
```
```   418       using inv' by (auto intro!: \<nu>'.integral_cong)
```
```   419     also have "\<dots> = (\<integral> x. log b (real (RN_deriv M \<nu> (T' x))) \<partial>M'\<lparr>measure := \<nu>'\<rparr>)" (is "_ = ?r")
```
```   420       using M ac AE
```
```   421       by (intro \<nu>'.integral_cong_AE \<nu>'.almost_everywhere_vimage[OF sa T'] absolutely_continuous_AE[OF M])
```
```   422          (auto elim!: AE_mp)
```
```   423     finally show "?l = ?r" .
```
```   424   qed
```
```   425 qed
```
```   426
```
```    (* Congruence rule: KL divergence depends only on the sets/space of M and the
```
```
```
```       values of the measures on measurable sets, so it may be transported to any
```
```
```
```       N, \<nu>' agreeing with M, \<nu> there. *)
```
```
```
```   427 lemma (in sigma_finite_measure) KL_divergence_cong:
```
```   428   assumes "measure_space (M\<lparr>measure := \<nu>\<rparr>)" (is "measure_space ?\<nu>")
```
```   429   assumes [simp]: "sets N = sets M" "space N = space M"
```
```   430     "\<And>A. A \<in> sets M \<Longrightarrow> measure N A = \<mu> A"
```
```   431     "\<And>A. A \<in> sets M \<Longrightarrow> \<nu> A = \<nu>' A"
```
```   432   shows "KL_divergence b M \<nu> = KL_divergence b N \<nu>'"
```
```   433 proof -
```
```   434   interpret \<nu>: measure_space ?\<nu> by fact
```
```   435   have "KL_divergence b M \<nu> = \<integral>x. log b (real (RN_deriv N \<nu>' x)) \<partial>?\<nu>"
```
```   436     by (simp cong: RN_deriv_cong \<nu>.integral_cong add: KL_divergence_def entropy_density_def)
```
```   437   also have "\<dots> = KL_divergence b N \<nu>'"
```
```   438     by (auto intro!: \<nu>.integral_cong_measure[symmetric] simp: KL_divergence_def entropy_density_def comp_def)
```
```   439   finally show ?thesis .
```
```   440 qed
```
```   441
```
```    (* On a finite measure space the KL divergence is the familiar finite sum
```
```
```
```       \<Sum>x. \<nu>{x} * log b (\<nu>{x} / \<mu>{x}), obtained by evaluating the integral on singletons. *)
```
```
```
```   442 lemma (in finite_measure_space) KL_divergence_eq_finite:
```
```   443   assumes v: "finite_measure_space (M\<lparr>measure := \<nu>\<rparr>)"
```
```   444   assumes ac: "absolutely_continuous \<nu>"
```
```   445   shows "KL_divergence b M \<nu> = (\<Sum>x\<in>space M. real (\<nu> {x}) * log b (real (\<nu> {x}) / real (\<mu> {x})))" (is "_ = ?sum")
```
```   446 proof (simp add: KL_divergence_def finite_measure_space.integral_finite_singleton[OF v] entropy_density_def)
```
```   447   interpret v: finite_measure_space "M\<lparr>measure := \<nu>\<rparr>" by fact
```
```   448   have ms: "measure_space (M\<lparr>measure := \<nu>\<rparr>)" by default
```
```   449   show "(\<Sum>x \<in> space M. log b (real (RN_deriv M \<nu> x)) * real (\<nu> {x})) = ?sum"
```
```   450     using RN_deriv_finite_measure[OF ms ac]
```
```   451     by (auto intro!: setsum_cong simp: field_simps)
```
```   452 qed
```
```   453
```
```    (* Nonnegativity of KL divergence in the finite case: interpret the finite
```
```
```
```       probability space as an information_space and apply KL_ge_0 (integrability
```
```
```
```       over a finite space is automatic). *)
```
```
```
```   454 lemma (in finite_prob_space) KL_divergence_positive_finite:
```
```   455   assumes v: "finite_prob_space (M\<lparr>measure := \<nu>\<rparr>)"
```
```   456   assumes ac: "absolutely_continuous \<nu>"
```
```   457   and "1 < b"
```
```   458   shows "0 \<le> KL_divergence b M \<nu>"
```
```   459 proof -
```
```   460   interpret information_space M by default fact
```
```   461   interpret v: finite_prob_space "M\<lparr>measure := \<nu>\<rparr>" by fact
```
```   462   have ps: "prob_space (M\<lparr>measure := \<nu>\<rparr>)" by default
```
```   463   from KL_ge_0[OF this ac v.integral_finite_singleton(1)] show ?thesis .
```
```   464 qed
```
```   465
```
```   466 subsection {* Mutual Information *}
```
```   467
```
```    (* Mutual information of X and Y: the KL divergence of the joint distribution
```
```
```
```       from the product of the marginal distributions on S \<Otimes>\<^isub>M T. *)
```
```
```
```   468 definition (in prob_space)
```
```   469   "mutual_information b S T X Y =
```
```   470     KL_divergence b (S\<lparr>measure := extreal\<circ>distribution X\<rparr> \<Otimes>\<^isub>M T\<lparr>measure := extreal\<circ>distribution Y\<rparr>)
```
```   471       (extreal\<circ>joint_distribution X Y)"
```
```   472
```
```   473 lemma (in information_space)
```
```   474   fixes S T X Y
```
```   475   defines "P \<equiv> S\<lparr>measure := extreal\<circ>distribution X\<rparr> \<Otimes>\<^isub>M T\<lparr>measure := extreal\<circ>distribution Y\<rparr>"
```
```   476   shows "indep_var S X T Y \<longleftrightarrow>
```
```   477     (random_variable S X \<and> random_variable T Y \<and>
```
```   478       measure_space.absolutely_continuous P (extreal\<circ>joint_distribution X Y) \<and>
```
```   479       integrable (P\<lparr>measure := (extreal\<circ>joint_distribution X Y)\<rparr>)
```
```   480         (entropy_density b P (extreal\<circ>joint_distribution X Y)) \<and>
```
```   481      mutual_information b S T X Y = 0)"
```
```   482 proof safe
```
```   483   assume indep: "indep_var S X T Y"
```
```   484   then have "random_variable S X" "random_variable T Y"
```
```   485     by (blast dest: indep_var_rv1 indep_var_rv2)+
```
```   486   then show "sigma_algebra S" "X \<in> measurable M S" "sigma_algebra T" "Y \<in> measurable M T"
```
```   487     by blast+
```
```   488
```
```   489   interpret X: prob_space "S\<lparr>measure := extreal\<circ>distribution X\<rparr>"
```
```   490     by (rule distribution_prob_space) fact
```
```   491   interpret Y: prob_space "T\<lparr>measure := extreal\<circ>distribution Y\<rparr>"
```
```   492     by (rule distribution_prob_space) fact
```
```   493   interpret XY: pair_prob_space "S\<lparr>measure := extreal\<circ>distribution X\<rparr>" "T\<lparr>measure := extreal\<circ>distribution Y\<rparr>" by default
```
```   494   interpret XY: information_space XY.P b by default (rule b_gt_1)
```
```   495
```
```   496   let ?J = "XY.P\<lparr> measure := (extreal\<circ>joint_distribution X Y) \<rparr>"
```
```   497   { fix A assume "A \<in> sets XY.P"
```
```   498     then have "extreal (joint_distribution X Y A) = XY.\<mu> A"
```
```   499       using indep_var_distributionD[OF indep]
```
```   500       by (simp add: XY.P.finite_measure_eq) }
```
```   501   note j_eq = this
```
```   502
```
```   503   interpret J: prob_space ?J
```
```   504     using j_eq by (intro XY.prob_space_cong) auto
```
```   505
```
```   506   have ac: "XY.absolutely_continuous (extreal\<circ>joint_distribution X Y)"
```
```   507     by (simp add: XY.absolutely_continuous_def j_eq)
```
```   508   then show "measure_space.absolutely_continuous P (extreal\<circ>joint_distribution X Y)"
```
```   509     unfolding P_def .
```
```   510
```
```   511   have ed: "entropy_density b XY.P (extreal\<circ>joint_distribution X Y) \<in> borel_measurable XY.P"
```
```   512     by (rule XY.measurable_entropy_density) (default | fact)+
```
```   513
```
```   514   have "AE x in XY.P. 1 = RN_deriv XY.P (extreal\<circ>joint_distribution X Y) x"
```
```   515   proof (rule XY.RN_deriv_unique[OF _ ac])
```
```   516     show "measure_space ?J" by default
```
```   517     fix A assume "A \<in> sets XY.P"
```
```   518     then show "(extreal\<circ>joint_distribution X Y) A = (\<integral>\<^isup>+ x. 1 * indicator A x \<partial>XY.P)"
```
```   519       by (simp add: j_eq)
```
```   520   qed (insert XY.measurable_const[of 1 borel], auto)
```
```   521   then have ae_XY: "AE x in XY.P. entropy_density b XY.P (extreal\<circ>joint_distribution X Y) x = 0"
```
```   522     by (elim XY.AE_mp) (simp add: entropy_density_def)
```
```   523   have ae_J: "AE x in ?J. entropy_density b XY.P (extreal\<circ>joint_distribution X Y) x = 0"
```
```   524   proof (rule XY.absolutely_continuous_AE)
```
```   525     show "measure_space ?J" by default
```
```   526     show "XY.absolutely_continuous (measure ?J)"
```
```   527       using ac by simp
```
```   528   qed (insert ae_XY, simp_all)
```
```   529   then show "integrable (P\<lparr>measure := (extreal\<circ>joint_distribution X Y)\<rparr>)
```
```   530         (entropy_density b P (extreal\<circ>joint_distribution X Y))"
```
```   531     unfolding P_def
```
```   532     using ed XY.measurable_const[of 0 borel]
```
```   533     by (subst J.integrable_cong_AE) auto
```
```   534
```
```   535   show "mutual_information b S T X Y = 0"
```
```   536     unfolding mutual_information_def KL_divergence_def P_def
```
```   537     by (subst J.integral_cong_AE[OF ae_J]) simp
```
```   538 next
```
```   539   assume "sigma_algebra S" "X \<in> measurable M S" "sigma_algebra T" "Y \<in> measurable M T"
```
```   540   then have rvs: "random_variable S X" "random_variable T Y" by blast+
```
```   541
```
```   542   interpret X: prob_space "S\<lparr>measure := extreal\<circ>distribution X\<rparr>"
```
```   543     by (rule distribution_prob_space) fact
```
```   544   interpret Y: prob_space "T\<lparr>measure := extreal\<circ>distribution Y\<rparr>"
```
```   545     by (rule distribution_prob_space) fact
```
```   546   interpret XY: pair_prob_space "S\<lparr>measure := extreal\<circ>distribution X\<rparr>" "T\<lparr>measure := extreal\<circ>distribution Y\<rparr>" by default
```
```   547   interpret XY: information_space XY.P b by default (rule b_gt_1)
```
```   548
```
```   549   let ?J = "XY.P\<lparr> measure := (extreal\<circ>joint_distribution X Y) \<rparr>"
```
```   550   interpret J: prob_space ?J
```
```   551     using rvs by (intro joint_distribution_prob_space) auto
```
```   552
```
```   553   assume ac: "measure_space.absolutely_continuous P (extreal\<circ>joint_distribution X Y)"
```
```   554   assume int: "integrable (P\<lparr>measure := (extreal\<circ>joint_distribution X Y)\<rparr>)
```
```   555         (entropy_density b P (extreal\<circ>joint_distribution X Y))"
```
```   556   assume I_eq_0: "mutual_information b S T X Y = 0"
```
```   557
```
```   558   have eq: "\<forall>A\<in>sets XY.P. (extreal \<circ> joint_distribution X Y) A = XY.\<mu> A"
```
```   559   proof (rule XY.KL_eq_0_imp)
```
```   560     show "prob_space ?J" by default
```
```   561     show "XY.absolutely_continuous (extreal\<circ>joint_distribution X Y)"
```
```   562       using ac by (simp add: P_def)
```
```   563     show "integrable ?J (entropy_density b XY.P (extreal\<circ>joint_distribution X Y))"
```
```   564       using int by (simp add: P_def)
```
```   565     show "KL_divergence b XY.P (extreal\<circ>joint_distribution X Y) = 0"
```
```   566       using I_eq_0 unfolding mutual_information_def by (simp add: P_def)
```
```   567   qed
```
```   568
```
```   569   { fix S X assume "sigma_algebra S"
```
```   570     interpret S: sigma_algebra S by fact
```
```   571     have "Int_stable \<lparr>space = space M, sets = {X -` A \<inter> space M |A. A \<in> sets S}\<rparr>"
```
```   572     proof (safe intro!: Int_stableI)
```
```   573       fix A B assume "A \<in> sets S" "B \<in> sets S"
```
```   574       then show "\<exists>C. (X -` A \<inter> space M) \<inter> (X -` B \<inter> space M) = (X -` C \<inter> space M) \<and> C \<in> sets S"
```
```   575         by (intro exI[of _ "A \<inter> B"]) auto
```
```   576     qed }
```
```   577   note Int_stable = this
```
```   578
```
```   579   show "indep_var S X T Y" unfolding indep_var_eq
```
```   580   proof (intro conjI indep_set_sigma_sets Int_stable)
```
```   581     show "indep_set {X -` A \<inter> space M |A. A \<in> sets S} {Y -` A \<inter> space M |A. A \<in> sets T}"
```
```   582     proof (safe intro!: indep_setI)
```
```   583       { fix A assume "A \<in> sets S" then show "X -` A \<inter> space M \<in> sets M"
```
```   584         using `X \<in> measurable M S` by (auto intro: measurable_sets) }
```
```   585       { fix A assume "A \<in> sets T" then show "Y -` A \<inter> space M \<in> sets M"
```
```   586         using `Y \<in> measurable M T` by (auto intro: measurable_sets) }
```
```   587     next
```
```   588       fix A B assume ab: "A \<in> sets S" "B \<in> sets T"
```
```   589       have "extreal (prob ((X -` A \<inter> space M) \<inter> (Y -` B \<inter> space M))) =
```
```   590         extreal (joint_distribution X Y (A \<times> B))"
```
```   591         unfolding distribution_def
```
```   592         by (intro arg_cong[where f="\<lambda>C. extreal (prob C)"]) auto
```
```   593       also have "\<dots> = XY.\<mu> (A \<times> B)"
```
```   594         using ab eq by (auto simp: XY.finite_measure_eq)
```
```   595       also have "\<dots> = extreal (distribution X A) * extreal (distribution Y B)"
```
```   596         using ab by (simp add: XY.pair_measure_times)
```
```   597       finally show "prob ((X -` A \<inter> space M) \<inter> (Y -` B \<inter> space M)) =
```
```   598         prob (X -` A \<inter> space M) * prob (Y -` B \<inter> space M)"
```
```   599         unfolding distribution_def by simp
```
```   600     qed
```
```   601   qed fact+
```
```   602 qed
```
```   603
```
``` (* Symmetry of mutual information: transport the KL divergence along the *)
```
``` (* measure-preserving swap (x,y) |-> (y,x) between the two product spaces. *)
```
```   604 lemma (in information_space) mutual_information_commute_generic:
```
```   605   assumes X: "random_variable S X" and Y: "random_variable T Y"
```
```   606   assumes ac: "measure_space.absolutely_continuous
```
```   607     (S\<lparr>measure := extreal\<circ>distribution X\<rparr> \<Otimes>\<^isub>M T\<lparr>measure := extreal\<circ>distribution Y\<rparr>) (extreal\<circ>joint_distribution X Y)"
```
```   608   shows "mutual_information b S T X Y = mutual_information b T S Y X"
```
```   609 proof -
```
```   610   let ?S = "S\<lparr>measure := extreal\<circ>distribution X\<rparr>" and ?T = "T\<lparr>measure := extreal\<circ>distribution Y\<rparr>"
```
```   611   interpret S: prob_space ?S using X by (rule distribution_prob_space)
```
```   612   interpret T: prob_space ?T using Y by (rule distribution_prob_space)
```
```   613   interpret P: pair_prob_space ?S ?T ..
```
```   614   interpret Q: pair_prob_space ?T ?S ..
```
```   615   show ?thesis
```
```   616     unfolding mutual_information_def
```
```   617   proof (intro Q.KL_divergence_vimage[OF Q.measure_preserving_swap _ _ _ _ ac b_gt_1])
```
``` (* The swap is measure preserving between the two joint-distribution spaces. *)
```
```   618     show "(\<lambda>(x,y). (y,x)) \<in> measure_preserving
```
```   619       (P.P \<lparr> measure := extreal\<circ>joint_distribution X Y\<rparr>) (Q.P \<lparr> measure := extreal\<circ>joint_distribution Y X\<rparr>)"
```
```   620       using X Y unfolding measurable_def
```
```   621       unfolding measure_preserving_def using P.pair_sigma_algebra_swap_measurable
```
```   622       by (auto simp add: space_pair_measure distribution_def intro!: arg_cong[where f=\<mu>'])
```
```   623     have "prob_space (P.P\<lparr> measure := extreal\<circ>joint_distribution X Y\<rparr>)"
```
```   624       using X Y by (auto intro!: distribution_prob_space random_variable_pairI)
```
```   625     then show "measure_space (P.P\<lparr> measure := extreal\<circ>joint_distribution X Y\<rparr>)"
```
```   626       unfolding prob_space_def by simp
```
```   627   qed auto
```
```   628 qed
```
```
```   629
```
``` (* Entropy is defined as the self-information of X: I(X; X). *)
```
```   630 definition (in prob_space)
```
```   631   "entropy b s X = mutual_information b s s X X"
```
```   632
```
``` (* \<I>(X ; Y): mutual information taken over the canonical finite measure *)
```
``` (* spaces (power sets of the images of X and Y, with their distributions). *)
```
```   633 abbreviation (in information_space)
```
```   634   mutual_information_Pow ("\<I>'(_ ; _')") where
```
```   635   "\<I>(X ; Y) \<equiv> mutual_information b
```
```   636     \<lparr> space = X`space M, sets = Pow (X`space M), measure = extreal\<circ>distribution X \<rparr>
```
```   637     \<lparr> space = Y`space M, sets = Pow (Y`space M), measure = extreal\<circ>distribution Y \<rparr> X Y"
```
```
```   638
```
``` (* For finite random variables, the joint distribution is absolutely continuous *)
```
``` (* w.r.t. the product of the marginal distribution spaces: whenever a marginal *)
```
``` (* of a singleton vanishes, so does the joint distribution of that singleton. *)
```
```   639 lemma (in prob_space) finite_variables_absolutely_continuous:
```
```   640   assumes X: "finite_random_variable S X" and Y: "finite_random_variable T Y"
```
```   641   shows "measure_space.absolutely_continuous
```
```   642     (S\<lparr>measure := extreal\<circ>distribution X\<rparr> \<Otimes>\<^isub>M T\<lparr>measure := extreal\<circ>distribution Y\<rparr>)
```
```   643     (extreal\<circ>joint_distribution X Y)"
```
```   644 proof -
```
```   645   interpret X: finite_prob_space "S\<lparr>measure := extreal\<circ>distribution X\<rparr>"
```
```   646     using X by (rule distribution_finite_prob_space)
```
```   647   interpret Y: finite_prob_space "T\<lparr>measure := extreal\<circ>distribution Y\<rparr>"
```
```   648     using Y by (rule distribution_finite_prob_space)
```
```   649   interpret XY: pair_finite_prob_space
```
```   650     "S\<lparr>measure := extreal\<circ>distribution X\<rparr>" "T\<lparr> measure := extreal\<circ>distribution Y\<rparr>" by default
```
```   651   interpret P: finite_prob_space "XY.P\<lparr> measure := extreal\<circ>joint_distribution X Y\<rparr>"
```
```   652     using assms by (auto intro!: joint_distribution_finite_prob_space)
```
```   653   note rv = assms[THEN finite_random_variableD]
```
```   654   show "XY.absolutely_continuous (extreal\<circ>joint_distribution X Y)"
```
```   655   proof (rule XY.absolutely_continuousI)
```
```   656     show "finite_measure_space (XY.P\<lparr> measure := extreal\<circ>joint_distribution X Y\<rparr>)" by default
```
``` (* On a finite space it suffices to check null singletons. *)
```
```   657     fix x assume "x \<in> space XY.P" and "XY.\<mu> {x} = 0"
```
```   658     then obtain a b where "x = (a, b)"
```
```   659       and "distribution X {a} = 0 \<or> distribution Y {b} = 0"
```
```   660       by (cases x) (auto simp: space_pair_measure)
```
```   661     with finite_distribution_order(5,6)[OF X Y]
```
```   662     show "(extreal \<circ> joint_distribution X Y) {x} = 0" by auto
```
```   663   qed
```
```   664 qed
```
```
```   665
```
``` (* Finite case: mutual information as an explicit double sum over the state *)
```
``` (* spaces, and its non-negativity; both follow from the finite KL-divergence *)
```
``` (* lemmas applied to the joint vs. product-of-marginals measures. *)
```
```   666 lemma (in information_space)
```
```   667   assumes MX: "finite_random_variable MX X"
```
```   668   assumes MY: "finite_random_variable MY Y"
```
```   669   shows mutual_information_generic_eq:
```
```   670     "mutual_information b MX MY X Y = (\<Sum> (x,y) \<in> space MX \<times> space MY.
```
```   671       joint_distribution X Y {(x,y)} *
```
```   672       log b (joint_distribution X Y {(x,y)} /
```
```   673       (distribution X {x} * distribution Y {y})))"
```
```   674     (is ?sum)
```
```   675   and mutual_information_positive_generic:
```
```   676      "0 \<le> mutual_information b MX MY X Y" (is ?positive)
```
```   677 proof -
```
```   678   interpret X: finite_prob_space "MX\<lparr>measure := extreal\<circ>distribution X\<rparr>"
```
```   679     using MX by (rule distribution_finite_prob_space)
```
```   680   interpret Y: finite_prob_space "MY\<lparr>measure := extreal\<circ>distribution Y\<rparr>"
```
```   681     using MY by (rule distribution_finite_prob_space)
```
```   682   interpret XY: pair_finite_prob_space "MX\<lparr>measure := extreal\<circ>distribution X\<rparr>" "MY\<lparr>measure := extreal\<circ>distribution Y\<rparr>" by default
```
```   683   interpret P: finite_prob_space "XY.P\<lparr>measure := extreal\<circ>joint_distribution X Y\<rparr>"
```
```   684     using assms by (auto intro!: joint_distribution_finite_prob_space)
```
```   685
```
```   686   have P_ms: "finite_measure_space (XY.P\<lparr>measure := extreal\<circ>joint_distribution X Y\<rparr>)" by default
```
```   687   have P_ps: "finite_prob_space (XY.P\<lparr>measure := extreal\<circ>joint_distribution X Y\<rparr>)" by default
```
```   688
```
```   689   show ?sum
```
```   690     unfolding Let_def mutual_information_def
```
```   691     by (subst XY.KL_divergence_eq_finite[OF P_ms finite_variables_absolutely_continuous[OF MX MY]])
```
```   692        (auto simp add: space_pair_measure setsum_cartesian_product')
```
```   693
```
```   694   show ?positive
```
```   695     using XY.KL_divergence_positive_finite[OF P_ps finite_variables_absolutely_continuous[OF MX MY] b_gt_1]
```
```   696     unfolding mutual_information_def .
```
```   697 qed
```
```
```   698
```
``` (* Symmetry of mutual information in the finite case: reindex the sum via *)
```
``` (* the swap bijection and commute the joint distribution. *)
```
```   699 lemma (in information_space) mutual_information_commute:
```
```   700   assumes X: "finite_random_variable S X" and Y: "finite_random_variable T Y"
```
```   701   shows "mutual_information b S T X Y = mutual_information b T S Y X"
```
```   702   unfolding mutual_information_generic_eq[OF X Y] mutual_information_generic_eq[OF Y X]
```
```   703   unfolding joint_distribution_commute_singleton[of X Y]
```
```   704   by (auto simp add: ac_simps intro!: setsum_reindex_cong[OF swap_inj_on])
```
```   705
```
``` (* Symmetry of \<I> for simple functions. *)
```
```   706 lemma (in information_space) mutual_information_commute_simple:
```
```   707   assumes X: "simple_function M X" and Y: "simple_function M Y"
```
```   708   shows "\<I>(X;Y) = \<I>(Y;X)"
```
```   709   by (intro mutual_information_commute X Y simple_function_imp_finite_random_variable)
```
```   710
```
``` (* Explicit sum formula for \<I> of simple functions. *)
```
```   711 lemma (in information_space) mutual_information_eq:
```
```   712   assumes "simple_function M X" "simple_function M Y"
```
```   713   shows "\<I>(X;Y) = (\<Sum> (x,y) \<in> X ` space M \<times> Y ` space M.
```
```   714     distribution (\<lambda>x. (X x, Y x)) {(x,y)} * log b (distribution (\<lambda>x. (X x, Y x)) {(x,y)} /
```
```   715                                                    (distribution X {x} * distribution Y {y})))"
```
```   716   using assms by (simp add: mutual_information_generic_eq)
```
```   717
```
``` (* Mutual information only depends on the values of X and Y on space M. *)
```
```   718 lemma (in information_space) mutual_information_generic_cong:
```
```   719   assumes X: "\<And>x. x \<in> space M \<Longrightarrow> X x = X' x"
```
```   720   assumes Y: "\<And>x. x \<in> space M \<Longrightarrow> Y x = Y' x"
```
```   721   shows "mutual_information b MX MY X Y = mutual_information b MX MY X' Y'"
```
```   722   unfolding mutual_information_def using X Y
```
```   723   by (simp cong: distribution_cong)
```
```   724
```
``` (* Congruence rule for the \<I>(_;_) abbreviation. *)
```
```   725 lemma (in information_space) mutual_information_cong:
```
```   726   assumes X: "\<And>x. x \<in> space M \<Longrightarrow> X x = X' x"
```
```   727   assumes Y: "\<And>x. x \<in> space M \<Longrightarrow> Y x = Y' x"
```
```   728   shows "\<I>(X; Y) = \<I>(X'; Y')"
```
```   729   unfolding mutual_information_def using X Y
```
```   730   by (simp cong: distribution_cong image_cong)
```
```   731
```
``` (* Non-negativity of \<I> for simple functions. *)
```
```   732 lemma (in information_space) mutual_information_positive:
```
```   733   assumes "simple_function M X" "simple_function M Y"
```
```   734   shows "0 \<le> \<I>(X;Y)"
```
```   735   using assms by (simp add: mutual_information_positive_generic)
```
```
```   736
```
```   737 subsection {* Entropy *}
```
```   738
```
``` (* \<H>(X): entropy over the canonical finite measure space on X's image. *)
```
```   739 abbreviation (in information_space)
```
```   740   entropy_Pow ("\<H>'(_')") where
```
```   741   "\<H>(X) \<equiv> entropy b \<lparr> space = X`space M, sets = Pow (X`space M), measure = extreal\<circ>distribution X \<rparr> X"
```
```
```   742
```
``` (* Entropy of a finite random variable as -\<Sum> p(x) * log b (p(x)): in the *)
```
``` (* self-information sum, off-diagonal terms (x \<noteq> y) vanish since the joint *)
```
``` (* distribution of (X, X) is concentrated on the diagonal. *)
```
```   743 lemma (in information_space) entropy_generic_eq:
```
```   744   fixes X :: "'a \<Rightarrow> 'c"
```
```   745   assumes MX: "finite_random_variable MX X"
```
```   746   shows "entropy b MX X = -(\<Sum> x \<in> space MX. distribution X {x} * log b (distribution X {x}))"
```
```   747 proof -
```
```   748   interpret MX: finite_prob_space "MX\<lparr>measure := extreal\<circ>distribution X\<rparr>"
```
```   749     using MX by (rule distribution_finite_prob_space)
```
```   750   let "?X x" = "distribution X {x}"
```
```   751   let "?XX x y" = "joint_distribution X X {(x, y)}"
```
```   752
```
``` (* Off-diagonal preimages of (X, X) are empty, so those summands are 0. *)
```
```   753   { fix x y :: 'c
```
```   754     { assume "x \<noteq> y"
```
```   755       then have "(\<lambda>x. (X x, X x)) -` {(x,y)} \<inter> space M = {}" by auto
```
```   756       then have "joint_distribution X X {(x, y)} = 0" by (simp add: distribution_def) }
```
```   757     then have "?XX x y * log b (?XX x y / (?X x * ?X y)) =
```
```   758         (if x = y then - ?X y * log b (?X y) else 0)"
```
```   759       by (auto simp: log_simps zero_less_mult_iff) }
```
```   760   note remove_XX = this
```
```   761
```
```   762   show ?thesis
```
```   763     unfolding entropy_def mutual_information_generic_eq[OF MX MX]
```
```   764     unfolding setsum_cartesian_product[symmetric] setsum_negf[symmetric] remove_XX
```
```   765     using MX.finite_space by (auto simp: setsum_cases)
```
```   766 qed
```
```
```   767
```
``` (* Explicit sum formula for \<H> of a simple function. *)
```
```   768 lemma (in information_space) entropy_eq:
```
```   769   assumes "simple_function M X"
```
```   770   shows "\<H>(X) = -(\<Sum> x \<in> X ` space M. distribution X {x} * log b (distribution X {x}))"
```
```   771   using assms by (simp add: entropy_generic_eq)
```
```   772
```
``` (* Entropy is non-negative, inherited from mutual information positivity. *)
```
```   773 lemma (in information_space) entropy_positive:
```
```   774   "simple_function M X \<Longrightarrow> 0 \<le> \<H>(X)"
```
```   775   unfolding entropy_def by (simp add: mutual_information_positive)
```
```
``` (* A variable that is almost surely constant (one outcome has probability 1) *)
```
``` (* has zero entropy: all other singletons must have probability 0. *)
```
```   777 lemma (in information_space) entropy_certainty_eq_0:
```
```   778   assumes X: "simple_function M X" and "x \<in> X ` space M" and "distribution X {x} = 1"
```
```   779   shows "\<H>(X) = 0"
```
```   780 proof -
```
```   781   let ?X = "\<lparr> space = X ` space M, sets = Pow (X ` space M), measure = extreal\<circ>distribution X\<rparr>"
```
```   782   note simple_function_imp_finite_random_variable[OF `simple_function M X`]
```
```   783   from distribution_finite_prob_space[OF this, of "\<lparr> measure = extreal\<circ>distribution X \<rparr>"]
```
```   784   interpret X: finite_prob_space ?X by simp
```
``` (* The complement of {x} carries no mass. *)
```
```   785   have "distribution X (X ` space M - {x}) = distribution X (X ` space M) - distribution X {x}"
```
```   786     using X.measure_compl[of "{x}"] assms by auto
```
```   787   also have "\<dots> = 0" using X.prob_space assms by auto
```
```   788   finally have X0: "distribution X (X ` space M - {x}) = 0" by auto
```
``` (* Hence every other singleton has probability 0, by monotonicity. *)
```
```   789   { fix y assume *: "y \<in> X ` space M"
```
```   790     { assume asm: "y \<noteq> x"
```
```   791       with * have "{y} \<subseteq> X ` space M - {x}" by auto
```
```   792       from X.measure_mono[OF this] X0 asm *
```
```   793       have "distribution X {y} = 0"  by (auto intro: antisym) }
```
```   794     then have "distribution X {y} = (if x = y then 1 else 0)"
```
```   795       using assms by auto }
```
```   796   note fi = this
```
```   797   have y: "\<And>y. (if x = y then 1 else 0) * log b (if x = y then 1 else 0) = 0" by simp
```
```   798   show ?thesis unfolding entropy_eq[OF `simple_function M X`] by (auto simp: y fi)
```
```   799 qed
```
```
```   800
```
``` (* Entropy is bounded by the log of the number of outcomes with non-zero *)
```
``` (* probability; the key step is Jensen's inequality via log_setsum'. *)
```
```   801 lemma (in information_space) entropy_le_card_not_0:
```
```   802   assumes X: "simple_function M X"
```
```   803   shows "\<H>(X) \<le> log b (card (X ` space M \<inter> {x. distribution X {x} \<noteq> 0}))"
```
```   804 proof -
```
```   805   let "?p x" = "distribution X {x}"
```
```   806   have "\<H>(X) = (\<Sum>x\<in>X`space M. ?p x * log b (1 / ?p x))"
```
```   807     unfolding entropy_eq[OF X] setsum_negf[symmetric]
```
```   808     by (auto intro!: setsum_cong simp: log_simps)
```
``` (* Concavity of log moves the sum inside the logarithm. *)
```
```   809   also have "\<dots> \<le> log b (\<Sum>x\<in>X`space M. ?p x * (1 / ?p x))"
```
```   810     using not_empty b_gt_1 `simple_function M X` sum_over_space_real_distribution[OF X]
```
```   811     by (intro log_setsum') (auto simp: simple_function_def)
```
```   812   also have "\<dots> = log b (\<Sum>x\<in>X`space M. if ?p x \<noteq> 0 then 1 else 0)"
```
```   813     by (intro arg_cong[where f="\<lambda>X. log b X"] setsum_cong) auto
```
```   814   finally show ?thesis
```
```   815     using `simple_function M X` by (auto simp: setsum_cases real_eq_of_nat simple_function_def)
```
```   816 qed
```
```
```   817
```
``` (* The finite-measure view \<mu>' of the distribution measure space agrees with *)
```
``` (* the distribution function itself on measurable sets. *)
```
```   818 lemma (in prob_space) measure'_translate:
```
```   819   assumes X: "random_variable S X" and A: "A \<in> sets S"
```
```   820   shows "finite_measure.\<mu>' (S\<lparr> measure := extreal\<circ>distribution X \<rparr>) A = distribution X A"
```
```   821 proof -
```
```   822   interpret S: prob_space "S\<lparr> measure := extreal\<circ>distribution X \<rparr>"
```
```   823     using distribution_prob_space[OF X] .
```
```   824   from A show "S.\<mu>' A = distribution X A"
```
```   825     unfolding S.\<mu>'_def by (simp add: distribution_def_raw \<mu>'_def)
```
```   826 qed
```
```
```   827
```
``` (* A uniformly distributed simple function attains the maximal entropy *)
```
``` (* log b |X ` space M|; each singleton gets probability 1 / card. *)
```
```   828 lemma (in information_space) entropy_uniform_max:
```
```   829   assumes X: "simple_function M X"
```
```   830   assumes "\<And>x y. \<lbrakk> x \<in> X ` space M ; y \<in> X ` space M \<rbrakk> \<Longrightarrow> distribution X {x} = distribution X {y}"
```
```   831   shows "\<H>(X) = log b (real (card (X ` space M)))"
```
```   832 proof -
```
```   833   let ?X = "\<lparr> space = X ` space M, sets = Pow (X ` space M), measure = undefined\<rparr>\<lparr> measure := extreal\<circ>distribution X\<rparr>"
```
```   834   note frv = simple_function_imp_finite_random_variable[OF X]
```
```   835   from distribution_finite_prob_space[OF this, of "\<lparr> measure = extreal\<circ>distribution X \<rparr>"]
```
```   836   interpret X: finite_prob_space ?X by simp
```
```   837   note rv = finite_random_variableD[OF frv]
```
```   838   have card_gt0: "0 < card (X ` space M)" unfolding card_gt_0_iff
```
```   839     using `simple_function M X` not_empty by (auto simp: simple_function_def)
```
``` (* Equal weights on a finite space force the uniform probability 1/card. *)
```
```   840   { fix x assume "x \<in> space ?X"
```
```   841     moreover then have "X.\<mu>' {x} = 1 / card (space ?X)"
```
```   842     proof (rule X.uniform_prob)
```
```   843       fix x y assume "x \<in> space ?X" "y \<in> space ?X"
```
```   844       with assms(2)[of x y] show "X.\<mu>' {x} = X.\<mu>' {y}"
```
```   845         by (subst (1 2) measure'_translate[OF rv]) auto
```
```   846     qed
```
```   847     ultimately have "distribution X {x} = 1 / card (space ?X)"
```
```   848       by (subst (asm) measure'_translate[OF rv]) auto }
```
```   849   thus ?thesis
```
```   850     using not_empty X.finite_space b_gt_1 card_gt0
```
```   851     by (simp add: entropy_eq[OF `simple_function M X`] real_eq_of_nat[symmetric] log_simps)
```
```   852 qed
```
```
```   853
```
``` (* Entropy is bounded by the log of the total number of outcomes; follows *)
```
``` (* from entropy_le_card_not_0 by monotonicity of card and log. *)
```
```   854 lemma (in information_space) entropy_le_card:
```
```   855   assumes "simple_function M X"
```
```   856   shows "\<H>(X) \<le> log b (real (card (X ` space M)))"
```
```   857 proof cases
```
``` (* Degenerate case: every outcome has probability 0, so the entropy sum is 0. *)
```
```   858   assume "X ` space M \<inter> {x. distribution X {x} \<noteq> 0} = {}"
```
```   859   then have "\<And>x. x\<in>X`space M \<Longrightarrow> distribution X {x} = 0" by auto
```
```   860   moreover
```
```   861   have "0 < card (X`space M)"
```
```   862     using `simple_function M X` not_empty
```
```   863     by (auto simp: card_gt_0_iff simple_function_def)
```
```   864   then have "log b 1 \<le> log b (real (card (X`space M)))"
```
```   865     using b_gt_1 by (intro log_le) auto
```
```   866   ultimately show ?thesis using assms by (simp add: entropy_eq)
```
```   867 next
```
``` (* Main case: chain the non-zero-outcomes bound with log monotonicity. *)
```
```   868   assume False: "X ` space M \<inter> {x. distribution X {x} \<noteq> 0} \<noteq> {}"
```
```   869   have "card (X ` space M \<inter> {x. distribution X {x} \<noteq> 0}) \<le> card (X ` space M)"
```
```   870     (is "?A \<le> ?B") using assms not_empty by (auto intro!: card_mono simp: simple_function_def)
```
```   871   note entropy_le_card_not_0[OF assms]
```
```   872   also have "log b (real ?A) \<le> log b (real ?B)"
```
```   873     using b_gt_1 False not_empty `?A \<le> ?B` assms
```
```   874     by (auto intro!: log_le simp: card_gt_0_iff simp: simple_function_def)
```
```   875   finally show ?thesis .
```
```   876 qed
```
```
```   877
```
``` (* Joint entropy is invariant under swapping the pair components: reindex *)
```
``` (* the entropy sum via the injective swap (a,b) |-> (b,a). *)
```
```   878 lemma (in information_space) entropy_commute:
```
```   879   assumes "simple_function M X" "simple_function M Y"
```
```   880   shows "\<H>(\<lambda>x. (X x, Y x)) = \<H>(\<lambda>x. (Y x, X x))"
```
```   881 proof -
```
```   882   have sf: "simple_function M (\<lambda>x. (X x, Y x))" "simple_function M (\<lambda>x. (Y x, X x))"
```
```   883     using assms by (auto intro: simple_function_Pair)
```
```   884   have *: "(\<lambda>x. (Y x, X x))`space M = (\<lambda>(a,b). (b,a))`(\<lambda>x. (X x, Y x))`space M"
```
```   885     by auto
```
```   886   have inj: "\<And>X. inj_on (\<lambda>(a,b). (b,a)) X"
```
```   887     by (auto intro!: inj_onI)
```
```   888   show ?thesis
```
```   889     unfolding sf[THEN entropy_eq] unfolding * setsum_reindex[OF inj]
```
```   890     by (simp add: joint_distribution_commute[of Y X] split_beta)
```
```   891 qed
```
```
```   892
```
``` (* Joint entropy written as a double sum over the component images; terms *)
```
``` (* outside the image of (X, Y) contribute 0 since their preimage is empty. *)
```
```   893 lemma (in information_space) entropy_eq_cartesian_product:
```
```   894   assumes "simple_function M X" "simple_function M Y"
```
```   895   shows "\<H>(\<lambda>x. (X x, Y x)) = -(\<Sum>x\<in>X`space M. \<Sum>y\<in>Y`space M.
```
```   896     joint_distribution X Y {(x,y)} * log b (joint_distribution X Y {(x,y)}))"
```
```   897 proof -
```
```   898   have sf: "simple_function M (\<lambda>x. (X x, Y x))"
```
```   899     using assms by (auto intro: simple_function_Pair)
```
```   900   { fix x assume "x\<notin>(\<lambda>x. (X x, Y x))`space M"
```
```   901     then have "(\<lambda>x. (X x, Y x)) -` {x} \<inter> space M = {}" by auto
```
```   902     then have "joint_distribution X Y {x} = 0"
```
```   903       unfolding distribution_def by auto }
```
```   904   then show ?thesis using sf assms
```
```   905     unfolding entropy_eq[OF sf] neg_equal_iff_equal setsum_cartesian_product
```
```   906     by (auto intro!: setsum_mono_zero_cong_left simp: simple_function_def)
```
```   907 qed
```
```
```   908
```
```   909 subsection {* Conditional Mutual Information *}
```
```   910
```
``` (* Chain-rule form: I(X;Y|Z) = I(X;(Y,Z)) - I(X;Z). *)
```
```   911 definition (in prob_space)
```
```   912   "conditional_mutual_information b MX MY MZ X Y Z \<equiv>
```
```   913     mutual_information b MX (MY \<Otimes>\<^isub>M MZ) X (\<lambda>x. (Y x, Z x)) -
```
```   914     mutual_information b MX MZ X Z"
```
```   915
```
``` (* \<I>(X ; Y | Z): conditional mutual information over the canonical finite *)
```
``` (* measure spaces on the images of X, Y and Z. *)
```
```   916 abbreviation (in information_space)
```
```   917   conditional_mutual_information_Pow ("\<I>'( _ ; _ | _ ')") where
```
```   918   "\<I>(X ; Y | Z) \<equiv> conditional_mutual_information b
```
```   919     \<lparr> space = X`space M, sets = Pow (X`space M), measure = extreal\<circ>distribution X \<rparr>
```
```   920     \<lparr> space = Y`space M, sets = Pow (Y`space M), measure = extreal\<circ>distribution Y \<rparr>
```
```   921     \<lparr> space = Z`space M, sets = Pow (Z`space M), measure = extreal\<circ>distribution Z \<rparr>
```
```   922     X Y Z"
```
```
```   923
```
``` (* Finite case: conditional mutual information as an explicit triple sum. *)
```
``` (* The proof splits the logarithm into the I(X;(Y,Z)) and I(X;Z) parts and *)
```
``` (* then sums out the y component of the second part. *)
```
```   924 lemma (in information_space) conditional_mutual_information_generic_eq:
```
```   925   assumes MX: "finite_random_variable MX X"
```
```   926     and MY: "finite_random_variable MY Y"
```
```   927     and MZ: "finite_random_variable MZ Z"
```
```   928   shows "conditional_mutual_information b MX MY MZ X Y Z = (\<Sum>(x, y, z) \<in> space MX \<times> space MY \<times> space MZ.
```
```   929              distribution (\<lambda>x. (X x, Y x, Z x)) {(x, y, z)} *
```
```   930              log b (distribution (\<lambda>x. (X x, Y x, Z x)) {(x, y, z)} /
```
```   931     (joint_distribution X Z {(x, z)} * (joint_distribution Y Z {(y,z)} / distribution Z {z}))))"
```
```   932   (is "_ = (\<Sum>(x, y, z)\<in>?S. ?XYZ x y z * log b (?XYZ x y z / (?XZ x z * (?YZ y z / ?Z z))))")
```
```   933 proof -
```
```   934   let ?X = "\<lambda>x. distribution X {x}"
```
``` (* Finiteness and ordering facts for all pairings of X, Y, Z. *)
```
```   935   note finite_var = MX MY MZ
```
```   936   note YZ = finite_random_variable_pairI[OF finite_var(2,3)]
```
```   937   note XYZ = finite_random_variable_pairI[OF MX YZ]
```
```   938   note XZ = finite_random_variable_pairI[OF finite_var(1,3)]
```
```   939   note ZX = finite_random_variable_pairI[OF finite_var(3,1)]
```
```   940   note YZX = finite_random_variable_pairI[OF finite_var(2) ZX]
```
```   941   note order1 =
```
```   942     finite_distribution_order(5,6)[OF finite_var(1) YZ]
```
```   943     finite_distribution_order(5,6)[OF finite_var(1,3)]
```
```   944
```
```   945   note random_var = finite_var[THEN finite_random_variableD]
```
```   946   note finite = finite_var(1) YZ finite_var(3) XZ YZX
```
```   947
```
``` (* If the (X,Z) marginal of a triple vanishes, so does the full joint. *)
```
```   948   have order2: "\<And>x y z. \<lbrakk>x \<in> space MX; y \<in> space MY; z \<in> space MZ; joint_distribution X Z {(x, z)} = 0\<rbrakk>
```
```   949           \<Longrightarrow> joint_distribution X (\<lambda>x. (Y x, Z x)) {(x, y, z)} = 0"
```
```   950     unfolding joint_distribution_commute_singleton[of X]
```
```   951     unfolding joint_distribution_assoc_singleton[symmetric]
```
```   952     using finite_distribution_order(6)[OF finite_var(2) ZX]
```
```   953     by auto
```
```   954
```
``` (* Split the log into the two mutual-information summands, termwise. *)
```
```   955   have "(\<Sum>(x, y, z)\<in>?S. ?XYZ x y z * log b (?XYZ x y z / (?XZ x z * (?YZ y z / ?Z z)))) =
```
```   956     (\<Sum>(x, y, z)\<in>?S. ?XYZ x y z * (log b (?XYZ x y z / (?X x * ?YZ y z)) - log b (?XZ x z / (?X x * ?Z z))))"
```
```   957     (is "(\<Sum>(x, y, z)\<in>?S. ?L x y z) = (\<Sum>(x, y, z)\<in>?S. ?R x y z)")
```
```   958   proof (safe intro!: setsum_cong)
```
```   959     fix x y z assume space: "x \<in> space MX" "y \<in> space MY" "z \<in> space MZ"
```
```   960     show "?L x y z = ?R x y z"
```
```   961     proof cases
```
```   962       assume "?XYZ x y z \<noteq> 0"
```
```   963       with space have "0 < ?X x" "0 < ?Z z" "0 < ?XZ x z" "0 < ?YZ y z" "0 < ?XYZ x y z"
```
```   964         using order1 order2 by (auto simp: less_le)
```
```   965       with b_gt_1 show ?thesis
```
```   966         by (simp add: log_mult log_divide zero_less_mult_iff zero_less_divide_iff)
```
```   967     qed simp
```
```   968   qed
```
```   969   also have "\<dots> = (\<Sum>(x, y, z)\<in>?S. ?XYZ x y z * log b (?XYZ x y z / (?X x * ?YZ y z))) -
```
```   970                   (\<Sum>(x, y, z)\<in>?S. ?XYZ x y z * log b (?XZ x z / (?X x * ?Z z)))"
```
```   971     by (auto simp add: setsum_subtractf[symmetric] field_simps intro!: setsum_cong)
```
``` (* Sum out y in the I(X;Z) part. *)
```
```   972   also have "(\<Sum>(x, y, z)\<in>?S. ?XYZ x y z * log b (?XZ x z / (?X x * ?Z z))) =
```
```   973              (\<Sum>(x, z)\<in>space MX \<times> space MZ. ?XZ x z * log b (?XZ x z / (?X x * ?Z z)))"
```
```   974     unfolding setsum_cartesian_product[symmetric] setsum_commute[of _ _ "space MY"]
```
```   975               setsum_left_distrib[symmetric]
```
```   976     unfolding joint_distribution_commute_singleton[of X]
```
```   977     unfolding joint_distribution_assoc_singleton[symmetric]
```
```   978     using setsum_joint_distribution_singleton[OF finite_var(2) ZX]
```
```   979     by (intro setsum_cong refl) (simp add: space_pair_measure)
```
```   980   also have "(\<Sum>(x, y, z)\<in>?S. ?XYZ x y z * log b (?XYZ x y z / (?X x * ?YZ y z))) -
```
```   981              (\<Sum>(x, z)\<in>space MX \<times> space MZ. ?XZ x z * log b (?XZ x z / (?X x * ?Z z))) =
```
```   982              conditional_mutual_information b MX MY MZ X Y Z"
```
```   983     unfolding conditional_mutual_information_def
```
```   984     unfolding mutual_information_generic_eq[OF finite_var(1,3)]
```
```   985     unfolding mutual_information_generic_eq[OF finite_var(1) YZ]
```
```   986     by (simp add: space_sigma space_pair_measure setsum_cartesian_product')
```
```   987   finally show ?thesis by simp
```
```   988 qed
```
```
```   989
```
```   (* Concrete sum formula for \<I>(X;Y|Z) of simple functions, obtained by
```
```      instantiating the generic equation at the image spaces of X, Y, Z. *)
```
```   990 lemma (in information_space) conditional_mutual_information_eq:
```
```   991   assumes "simple_function M X" "simple_function M Y" "simple_function M Z"
```
```   992   shows "\<I>(X;Y|Z) = (\<Sum>(x, y, z) \<in> X`space M \<times> Y`space M \<times> Z`space M.
```
```   993              distribution (\<lambda>x. (X x, Y x, Z x)) {(x, y, z)} *
```
```   994              log b (distribution (\<lambda>x. (X x, Y x, Z x)) {(x, y, z)} /
```
```   995     (joint_distribution X Z {(x, z)} * joint_distribution Y Z {(y,z)} / distribution Z {z})))"
```
```   996   by (subst conditional_mutual_information_generic_eq[OF assms[THEN simple_function_imp_finite_random_variable]])
```
```   997      simp
```
```   998
```
```   (* Mutual information equals conditional mutual information given the
```
```      trivial (constant unit-valued) random variable. *)
```
```   999 lemma (in information_space) conditional_mutual_information_eq_mutual_information:
```
```  1000   assumes X: "simple_function M X" and Y: "simple_function M Y"
```
```  1001   shows "\<I>(X ; Y) = \<I>(X ; Y | (\<lambda>x. ()))"
```
```  1002 proof -
```
```  1003   have [simp]: "(\<lambda>x. ()) ` space M = {()}" using not_empty by auto
```
```  1004   have C: "simple_function M (\<lambda>x. ())" by auto
```
```  1005   show ?thesis
```
```  1006     unfolding conditional_mutual_information_eq[OF X Y C]
```
```  1007     unfolding mutual_information_eq[OF X Y]
```
```  1008     by (simp add: setsum_cartesian_product' distribution_remove_const)
```
```  1009 qed
```
```  1010
```
```   (* The constant unit variable hits {()} with probability 1. *)
```
```  1011 lemma (in prob_space) distribution_unit[simp]: "distribution (\<lambda>x. ()) {()} = 1"
```
```  1012   unfolding distribution_def using prob_space by auto
```
```  1013
```
```   (* Pairing a variable with the unit variable leaves its distribution unchanged. *)
```
```  1014 lemma (in prob_space) joint_distribution_unit[simp]: "distribution (\<lambda>x. (X x, ())) {(a, ())} = distribution X {a}"
```
```  1015   unfolding distribution_def by (auto intro!: arg_cong[where f=\<mu>'])
```
```  1016
```
```   (* A finite random variable's pointwise distribution sums to 1 over its space;
```
```      derived from the joint-distribution sum against the trivial unit variable. *)
```
```  1017 lemma (in prob_space) setsum_distribution:
```
```  1018   assumes X: "finite_random_variable MX X" shows "(\<Sum>a\<in>space MX. distribution X {a}) = 1"
```
```  1019   using setsum_joint_distribution[OF assms, of "\<lparr> space = UNIV, sets = Pow UNIV \<rparr>" "\<lambda>x. ()" "{()}"]
```
```  1020   using sigma_algebra_Pow[of "UNIV::unit set" "()"] by simp
```
```  1021
```
```   (* NOTE(review): statement is identical to setsum_distribution up to the explicit
```
```      measure_space_scheme type constraint on MX; kept as a separate name because
```
```      later proofs (e.g. line 1103) refer to it explicitly -- consider merging. *)
```
```  1022 lemma (in prob_space) setsum_real_distribution:
```
```  1023   fixes MX :: "('c, 'd) measure_space_scheme"
```
```  1024   assumes X: "finite_random_variable MX X" shows "(\<Sum>a\<in>space MX. distribution X {a}) = 1"
```
```  1025   using setsum_joint_distribution[OF assms, of "\<lparr> space = UNIV, sets = Pow UNIV, measure = undefined \<rparr>" "\<lambda>x. ()" "{()}"]
```
```  1026   using sigma_algebra_Pow[of "UNIV::unit set" "\<lparr> measure = undefined \<rparr>"]
```
```  1027   by auto
```
```  1028
```
```   (* Nonnegativity of conditional mutual information for finite random variables.
```
```      Proof shape: rewrite -\<I>(X;Y|Z) as a log-sum, bound it via log_setsum_divide
```
```      (Jensen-type inequality), and collapse the bounding sum to log 1 = 0.
```
```      Cleanup in this revision: dropped a duplicated 'note YZ = ...' binding and
```
```      the unused abbreviation ?dX; no change to the proof's logical content. *)
```
```  1029 lemma (in information_space) conditional_mutual_information_generic_positive:
```
```  1030   assumes X: "finite_random_variable MX X" and Y: "finite_random_variable MY Y" and Z: "finite_random_variable MZ Z"
```
```  1031   shows "0 \<le> conditional_mutual_information b MX MY MZ X Y Z"
```
```  1032 proof (cases "space MX \<times> space MY \<times> space MZ = {}")
```
```  1033   case True show ?thesis
```
```  1034     unfolding conditional_mutual_information_generic_eq[OF assms] True
```
```  1035     by simp
```
```  1036 next
```
```  1037   case False
```
```   (* Abbreviations for the distributions occurring in the sum. *)
```
```  1038   let ?dXYZ = "distribution (\<lambda>x. (X x, Y x, Z x))"
```
```  1039   let ?dXZ = "joint_distribution X Z"
```
```  1040   let ?dYZ = "joint_distribution Y Z"
```
```  1042   let ?dZ = "distribution Z"
```
```  1043   let ?M = "space MX \<times> space MY \<times> space MZ"
```
```  1044
```
```   (* Finiteness of the paired random variables used below. *)
```
```  1045   note YZ = finite_random_variable_pairI[OF Y Z]
```
```  1046   note XZ = finite_random_variable_pairI[OF X Z]
```
```  1047   note ZX = finite_random_variable_pairI[OF Z X]
```
```  1049   note XYZ = finite_random_variable_pairI[OF X YZ]
```
```  1050   note finite = Z YZ XZ XYZ
```
```   (* Vanishing of the marginal forces vanishing of the triple distribution. *)
```
```  1051   have order: "\<And>x y z. \<lbrakk>x \<in> space MX; y \<in> space MY; z \<in> space MZ; joint_distribution X Z {(x, z)} = 0\<rbrakk>
```
```  1052           \<Longrightarrow> joint_distribution X (\<lambda>x. (Y x, Z x)) {(x, y, z)} = 0"
```
```  1053     unfolding joint_distribution_commute_singleton[of X]
```
```  1054     unfolding joint_distribution_assoc_singleton[symmetric]
```
```  1055     using finite_distribution_order(6)[OF Y ZX]
```
```  1056     by auto
```
```  1057
```
```  1058   note order = order
```
```  1059     finite_distribution_order(5,6)[OF X YZ]
```
```  1060     finite_distribution_order(5,6)[OF Y Z]
```
```  1061
```
```  1062   have "- conditional_mutual_information b MX MY MZ X Y Z = - (\<Sum>(x, y, z) \<in> ?M. ?dXYZ {(x, y, z)} *
```
```  1063     log b (?dXYZ {(x, y, z)} / (?dXZ {(x, z)} * ?dYZ {(y,z)} / ?dZ {z})))"
```
```  1064     unfolding conditional_mutual_information_generic_eq[OF assms] neg_equal_iff_equal by auto
```
```  1065   also have "\<dots> \<le> log b (\<Sum>(x, y, z) \<in> ?M. ?dXZ {(x, z)} * ?dYZ {(y,z)} / ?dZ {z})"
```
```  1066     unfolding split_beta'
```
```  1067   proof (rule log_setsum_divide)
```
```  1068     show "?M \<noteq> {}" using False by simp
```
```  1069     show "1 < b" using b_gt_1 .
```
```  1070
```
```  1071     show "finite ?M" using assms
```
```  1072       unfolding finite_sigma_algebra_def finite_sigma_algebra_axioms_def by auto
```
```  1073
```
```  1074     show "(\<Sum>x\<in>?M. ?dXYZ {(fst x, fst (snd x), snd (snd x))}) = 1"
```
```  1075       unfolding setsum_cartesian_product'
```
```  1076       unfolding setsum_commute[of _ "space MY"]
```
```  1077       unfolding setsum_commute[of _ "space MZ"]
```
```  1078       by (simp_all add: space_pair_measure
```
```  1079                         setsum_joint_distribution_singleton[OF X YZ]
```
```  1080                         setsum_joint_distribution_singleton[OF Y Z]
```
```  1081                         setsum_distribution[OF Z])
```
```  1082
```
```  1083     fix x assume "x \<in> ?M"
```
```  1084     let ?x = "(fst x, fst (snd x), snd (snd x))"
```
```  1085
```
```  1086     show "0 \<le> ?dXYZ {?x}"
```
```  1087       "0 \<le> ?dXZ {(fst x, snd (snd x))} * ?dYZ {(fst (snd x), snd (snd x))} / ?dZ {snd (snd x)}"
```
```  1088      by (simp_all add: mult_nonneg_nonneg divide_nonneg_nonneg)
```
```  1089
```
```  1090     assume *: "0 < ?dXYZ {?x}"
```
```  1091     with `x \<in> ?M` finite order show "0 < ?dXZ {(fst x, snd (snd x))} * ?dYZ {(fst (snd x), snd (snd x))} / ?dZ {snd (snd x)}"
```
```  1092       by (cases x) (auto simp add: zero_le_mult_iff zero_le_divide_iff less_le)
```
```  1093   qed
```
```  1094   also have "(\<Sum>(x, y, z) \<in> ?M. ?dXZ {(x, z)} * ?dYZ {(y,z)} / ?dZ {z}) = (\<Sum>z\<in>space MZ. ?dZ {z})"
```
```  1095     apply (simp add: setsum_cartesian_product')
```
```  1096     apply (subst setsum_commute)
```
```  1097     apply (subst (2) setsum_commute)
```
```  1098     by (auto simp: setsum_divide_distrib[symmetric] setsum_product[symmetric]
```
```  1099                    setsum_joint_distribution_singleton[OF X Z]
```
```  1100                    setsum_joint_distribution_singleton[OF Y Z]
```
```  1101           intro!: setsum_cong)
```
```  1102   also have "log b (\<Sum>z\<in>space MZ. ?dZ {z}) = 0"
```
```  1103     unfolding setsum_real_distribution[OF Z] by simp
```
```  1104   finally show ?thesis by simp
```
```  1105 qed
```
```  1106
```
```   (* Nonnegativity of \<I>(X;Y|Z) for simple functions, via the generic lemma. *)
```
```  1107 lemma (in information_space) conditional_mutual_information_positive:
```
```  1108   assumes "simple_function M X" and "simple_function M Y" and "simple_function M Z"
```
```  1109   shows "0 \<le> \<I>(X;Y|Z)"
```
```  1110   by (rule conditional_mutual_information_generic_positive[OF assms[THEN simple_function_imp_finite_random_variable]])
```
```  1111
```
```  1112 subsection {* Conditional Entropy *}
```
```  1113
```
```   (* Conditional entropy H(X|Y) is defined as the conditional mutual
```
```      information of X with itself given Y: I(X;X|Y). *)
```
```  1114 definition (in prob_space)
```
```  1115   "conditional_entropy b S T X Y = conditional_mutual_information b S S T X X Y"
```
```  1116
```
```   (* Notation \<H>(X | Y): conditional entropy taken w.r.t. the power-set
```
```      measure spaces over the images of X and Y, weighted by their distributions. *)
```
```  1117 abbreviation (in information_space)
```
```  1118   conditional_entropy_Pow ("\<H>'(_ | _')") where
```
```  1119   "\<H>(X | Y) \<equiv> conditional_entropy b
```
```  1120     \<lparr> space = X`space M, sets = Pow (X`space M), measure = extreal\<circ>distribution X \<rparr>
```
```  1121     \<lparr> space = Y`space M, sets = Pow (Y`space M), measure = extreal\<circ>distribution Y \<rparr> X Y"
```
```  1122
```
```   (* Conditional entropy is nonnegative, inherited from \<I>(X;X|Y) \<ge> 0. *)
```
```  1123 lemma (in information_space) conditional_entropy_positive:
```
```  1124   "simple_function M X \<Longrightarrow> simple_function M Y \<Longrightarrow> 0 \<le> \<H>(X | Y)"
```
```  1125   unfolding conditional_entropy_def by (auto intro!: conditional_mutual_information_positive)
```
```  1126
```
```   (* Sum formula for conditional entropy. The key step collapses the
```
```      diagonal: the joint distribution of X paired with (X, Z) is nonzero
```
```      only on triples (x, x, z), where it equals the X-Z joint distribution,
```
```      so the triple sum over I(X;X|Z) reduces to a double sum. *)
```
```  1127 lemma (in information_space) conditional_entropy_generic_eq:
```
```  1128   fixes MX :: "('c, 'd) measure_space_scheme" and MY :: "('e, 'f) measure_space_scheme"
```
```  1129   assumes MX: "finite_random_variable MX X"
```
```  1130   assumes MZ: "finite_random_variable MZ Z"
```
```  1131   shows "conditional_entropy b MX MZ X Z =
```
```  1132      - (\<Sum>(x, z)\<in>space MX \<times> space MZ.
```
```  1133          joint_distribution X Z {(x, z)} * log b (joint_distribution X Z {(x, z)} / distribution Z {z}))"
```
```  1134 proof -
```
```  1135   interpret MX: finite_sigma_algebra MX using MX by simp
```
```  1136   interpret MZ: finite_sigma_algebra MZ using MZ by simp
```
```  1137   let "?XXZ x y z" = "joint_distribution X (\<lambda>x. (X x, Z x)) {(x, y, z)}"
```
```  1138   let "?XZ x z" = "joint_distribution X Z {(x, z)}"
```
```  1139   let "?Z z" = "distribution Z {z}"
```
```  1140   let "?f x y z" = "log b (?XXZ x y z * ?Z z / (?XZ x z * ?XZ y z))"
```
```   (* Diagonal entries coincide with the X-Z joint distribution ... *)
```
```  1141   { fix x z have "?XXZ x x z = ?XZ x z"
```
```  1142       unfolding distribution_def by (auto intro!: arg_cong[where f=\<mu>']) }
```
```  1143   note this[simp]
```
```   (* ... and off-diagonal entries vanish. *)
```
```  1144   { fix x x' :: 'c and z assume "x' \<noteq> x"
```
```  1145     then have "?XXZ x x' z = 0"
```
```  1146       by (auto simp: distribution_def empty_measure'[symmetric]
```
```  1147                simp del: empty_measure' intro!: arg_cong[where f=\<mu>']) }
```
```  1148   note this[simp]
```
```   (* Hence the inner sum over x' reduces to a single diagonal term. *)
```
```  1149   { fix x x' z assume *: "x \<in> space MX" "z \<in> space MZ"
```
```  1150     then have "(\<Sum>x'\<in>space MX. ?XXZ x x' z * ?f x x' z)
```
```  1151       = (\<Sum>x'\<in>space MX. if x = x' then ?XZ x z * ?f x x z else 0)"
```
```  1152       by (auto intro!: setsum_cong)
```
```  1153     also have "\<dots> = ?XZ x z * ?f x x z"
```
```  1154       using `x \<in> space MX` by (simp add: setsum_cases[OF MX.finite_space])
```
```  1155     also have "\<dots> = ?XZ x z * log b (?Z z / ?XZ x z)" by auto
```
```  1156     also have "\<dots> = - ?XZ x z * log b (?XZ x z / ?Z z)"
```
```  1157       using finite_distribution_order(6)[OF MX MZ]
```
```  1158       by (auto simp: log_simps field_simps zero_less_mult_iff)
```
```  1159     finally have "(\<Sum>x'\<in>space MX. ?XXZ x x' z * ?f x x' z) = - ?XZ x z * log b (?XZ x z / ?Z z)" . }
```
```  1160   note * = this
```
```  1161   show ?thesis
```
```  1162     unfolding conditional_entropy_def
```
```  1163     unfolding conditional_mutual_information_generic_eq[OF MX MX MZ]
```
```  1164     by (auto simp: setsum_cartesian_product' setsum_negf[symmetric]
```
```  1165                    setsum_commute[of _ "space MZ"] *
```
```  1166              intro!: setsum_cong)
```
```  1167 qed
```
```  1168
```
```   (* Instance of the generic conditional-entropy formula for simple functions. *)
```
```  1169 lemma (in information_space) conditional_entropy_eq:
```
```  1170   assumes "simple_function M X" "simple_function M Z"
```
```  1171   shows "\<H>(X | Z) =
```
```  1172      - (\<Sum>(x, z)\<in>X ` space M \<times> Z ` space M.
```
```  1173          joint_distribution X Z {(x, z)} *
```
```  1174          log b (joint_distribution X Z {(x, z)} / distribution Z {z}))"
```
```  1175   by (subst conditional_entropy_generic_eq[OF assms[THEN simple_function_imp_finite_random_variable]])
```
```  1176      simp
```
```  1177
```
```   (* Rewrites \<H>(X | Y) as the Y-expectation of the entropy of the
```
```      conditional distributions P(X | Y = y). *)
```
```  1178 lemma (in information_space) conditional_entropy_eq_ce_with_hypothesis:
```
```  1179   assumes X: "simple_function M X" and Y: "simple_function M Y"
```
```  1180   shows "\<H>(X | Y) =
```
```  1181     -(\<Sum>y\<in>Y`space M. distribution Y {y} *
```
```  1182       (\<Sum>x\<in>X`space M. joint_distribution X Y {(x,y)} / distribution Y {(y)} *
```
```  1183               log b (joint_distribution X Y {(x,y)} / distribution Y {(y)})))"
```
```  1184   unfolding conditional_entropy_eq[OF assms]
```
```  1185   using finite_distribution_order(5,6)[OF assms[THEN simple_function_imp_finite_random_variable]]
```
```  1186   by (auto simp: setsum_cartesian_product'  setsum_commute[of _ "Y`space M"] setsum_right_distrib
```
```  1187            intro!: setsum_cong)
```
```  1188
```
```   (* Same formula as conditional_entropy_eq, with the pair-sum written
```
```      as two nested sums. *)
```
```  1189 lemma (in information_space) conditional_entropy_eq_cartesian_product:
```
```  1190   assumes "simple_function M X" "simple_function M Y"
```
```  1191   shows "\<H>(X | Y) = -(\<Sum>x\<in>X`space M. \<Sum>y\<in>Y`space M.
```
```  1192     joint_distribution X Y {(x,y)} *
```
```  1193     log b (joint_distribution X Y {(x,y)} / distribution Y {y}))"
```
```  1194   unfolding conditional_entropy_eq[OF assms]
```
```  1195   by (auto intro!: setsum_cong simp: setsum_cartesian_product')
```
```  1196
```
```  1197 subsection {* Equalities *}
```
```  1198
```
```   (* Classical identity I(X;Z) = H(X) - H(X|Z), proved by splitting the
```
```      logarithm of the quotient pointwise and resumming. *)
```
```  1199 lemma (in information_space) mutual_information_eq_entropy_conditional_entropy:
```
```  1200   assumes X: "simple_function M X" and Z: "simple_function M Z"
```
```  1201   shows  "\<I>(X ; Z) = \<H>(X) - \<H>(X | Z)"
```
```  1202 proof -
```
```  1203   let "?XZ x z" = "joint_distribution X Z {(x, z)}"
```
```  1204   let "?Z z" = "distribution Z {z}"
```
```  1205   let "?X x" = "distribution X {x}"
```
```  1206   note fX = X[THEN simple_function_imp_finite_random_variable]
```
```  1207   note fZ = Z[THEN simple_function_imp_finite_random_variable]
```
```  1208   note finite_distribution_order[OF fX fZ, simp]
```
```   (* log(p/(q*r)) = log(p/r) - log q, stated with the distributions. *)
```
```  1209   { fix x z assume "x \<in> X`space M" "z \<in> Z`space M"
```
```  1210     have "?XZ x z * log b (?XZ x z / (?X x * ?Z z)) =
```
```  1211           ?XZ x z * log b (?XZ x z / ?Z z) - ?XZ x z * log b (?X x)"
```
```  1212       by (auto simp: log_simps zero_le_mult_iff field_simps less_le) }
```
```  1213   note * = this
```
```  1214   show ?thesis
```
```  1215     unfolding entropy_eq[OF X] conditional_entropy_eq[OF X Z] mutual_information_eq[OF X Z]
```
```  1216     using setsum_joint_distribution_singleton[OF fZ fX, unfolded joint_distribution_commute_singleton[of Z X]]
```
```  1217     by (simp add: * setsum_cartesian_product' setsum_subtractf setsum_left_distrib[symmetric]
```
```  1218                      setsum_distribution)
```
```  1219 qed
```
```  1220
```
```   (* Conditioning cannot increase entropy: follows from I(X;Z) \<ge> 0 and
```
```      the identity I(X;Z) = H(X) - H(X|Z). *)
```
```  1221 lemma (in information_space) conditional_entropy_less_eq_entropy:
```
```  1222   assumes X: "simple_function M X" and Z: "simple_function M Z"
```
```  1223   shows "\<H>(X | Z) \<le> \<H>(X)"
```
```  1224 proof -
```
```  1225   have "\<I>(X ; Z) = \<H>(X) - \<H>(X | Z)" using mutual_information_eq_entropy_conditional_entropy[OF assms] .
```
```  1226   with mutual_information_positive[OF X Z] entropy_positive[OF X]
```
```  1227   show ?thesis by auto
```
```  1228 qed
```
```  1229
```
```   (* Chain rule for entropy: H(X,Y) = H(X) + H(Y|X). *)
```
```  1230 lemma (in information_space) entropy_chain_rule:
```
```  1231   assumes X: "simple_function M X" and Y: "simple_function M Y"
```
```  1232   shows  "\<H>(\<lambda>x. (X x, Y x)) = \<H>(X) + \<H>(Y|X)"
```
```  1233 proof -
```
```  1234   let "?XY x y" = "joint_distribution X Y {(x, y)}"
```
```  1235   let "?Y y" = "distribution Y {y}"
```
```  1236   let "?X x" = "distribution X {x}"
```
```  1237   note fX = X[THEN simple_function_imp_finite_random_variable]
```
```  1238   note fY = Y[THEN simple_function_imp_finite_random_variable]
```
```  1239   note finite_distribution_order[OF fX fY, simp]
```
```   (* Split log(p/q) = log p - log q under the joint distribution. *)
```
```  1240   { fix x y assume "x \<in> X`space M" "y \<in> Y`space M"
```
```  1241     have "?XY x y * log b (?XY x y / ?X x) =
```
```  1242           ?XY x y * log b (?XY x y) - ?XY x y * log b (?X x)"
```
```  1243       by (auto simp: log_simps zero_le_mult_iff field_simps less_le) }
```
```  1244   note * = this
```
```  1245   show ?thesis
```
```  1246     using setsum_joint_distribution_singleton[OF fY fX]
```
```  1247     unfolding entropy_eq[OF X] conditional_entropy_eq_cartesian_product[OF Y X] entropy_eq_cartesian_product[OF X Y]
```
```  1248     unfolding joint_distribution_commute_singleton[of Y X] setsum_commute[of _ "X`space M"]
```
```  1249     by (simp add: * setsum_subtractf setsum_left_distrib[symmetric])
```
```  1250 qed
```
```  1251
```
```  1252 section {* Partitioning *}
```
```  1253
```
```   (* f refines g on A: every fiber of f within A lies inside a fiber of g. *)
```
```  1254 definition "subvimage A f g \<longleftrightarrow> (\<forall>x \<in> A. f -` {f x} \<inter> A \<subseteq> g -` {g x} \<inter> A)"
```
```  1255
```
```   (* Introduction rule: agreement of f implies agreement of g on A. *)
```
```  1256 lemma subvimageI:
```
```  1257   assumes "\<And>x y. \<lbrakk> x \<in> A ; y \<in> A ; f x = f y \<rbrakk> \<Longrightarrow> g x = g y"
```
```  1258   shows "subvimage A f g"
```
```  1259   using assms unfolding subvimage_def by blast
```
```  1260
```
```   (* Elimination rule, dual to subvimageI. *)
```
```  1261 lemma subvimageE[consumes 1]:
```
```  1262   assumes "subvimage A f g"
```
```  1263   obtains "\<And>x y. \<lbrakk> x \<in> A ; y \<in> A ; f x = f y \<rbrakk> \<Longrightarrow> g x = g y"
```
```  1264   using assms unfolding subvimage_def by blast
```
```  1265
```
```   (* Destruction rule for subvimage.
```
```      NOTE(review): this lemma has no 'assumes' clause, so 'using assms' looks
```
```      vacuous or a leftover from an 'assumes'-style statement -- confirm it loads. *)
```
```  1266 lemma subvimageD:
```
```  1267   "\<lbrakk> subvimage A f g ; x \<in> A ; y \<in> A ; f x = f y \<rbrakk> \<Longrightarrow> g x = g y"
```
```  1268   using assms unfolding subvimage_def by blast
```
```  1269
```
```   (* subvimage is antitone in the carrier set. *)
```
```  1270 lemma subvimage_subset:
```
```  1271   "\<lbrakk> subvimage B f g ; A \<subseteq> B \<rbrakk> \<Longrightarrow> subvimage A f g"
```
```  1272   unfolding subvimage_def by auto
```
```  1273
```
```   (* Reflexivity: every function refines itself. *)
```
```  1274 lemma subvimage_idem[intro]: "subvimage A g g"
```
```  1275   by (safe intro!: subvimageI)
```
```  1276
```
```   (* Post-composing the coarser side preserves refinement. *)
```
```  1277 lemma subvimage_comp_finer[intro]:
```
```  1278   assumes svi: "subvimage A g h"
```
```  1279   shows "subvimage A g (f \<circ> h)"
```
```  1280 proof (rule subvimageI, simp)
```
```  1281   fix x y assume "x \<in> A" "y \<in> A" "g x = g y"
```
```  1282   from svi[THEN subvimageD, OF this]
```
```  1283   show "f (h x) = f (h y)" by simp
```
```  1284 qed
```
```  1285
```
```   (* Pre-composing the finer side with an injection preserves refinement. *)
```
```  1286 lemma subvimage_comp_gran:
```
```  1287   assumes svi: "subvimage A g h"
```
```  1288   assumes inj: "inj_on f (g ` A)"
```
```  1289   shows "subvimage A (f \<circ> g) h"
```
```  1290   by (rule subvimageI) (auto intro!: subvimageD[OF svi] simp: inj_on_iff[OF inj])
```
```  1291
```
```   (* Refinement is stable under pre-composition on both sides. *)
```
```  1292 lemma subvimage_comp:
```
```  1293   assumes svi: "subvimage (f ` A) g h"
```
```  1294   shows "subvimage A (g \<circ> f) (h \<circ> f)"
```
```  1295   by (rule subvimageI) (auto intro!: svi[THEN subvimageD])
```
```  1296
```
```   (* Transitivity of the refinement relation. *)
```
```  1297 lemma subvimage_trans:
```
```  1298   assumes fg: "subvimage A f g"
```
```  1299   assumes gh: "subvimage A g h"
```
```  1300   shows "subvimage A f h"
```
```  1301   by (rule subvimageI) (auto intro!: fg[THEN subvimageD] gh[THEN subvimageD])
```
```  1302
```
```   (* If f refines g on A, then g factors through f on A: some h satisfies
```
```      h \<circ> f = g on A.  The witness picks the unique g-value on each f-fiber. *)
```
```  1303 lemma subvimage_translator:
```
```  1304   assumes svi: "subvimage A f g"
```
```  1305   shows "\<exists>h. \<forall>x \<in> A. h (f x)  = g x"
```
```  1306 proof (safe intro!: exI[of _ "\<lambda>x. (THE z. z \<in> (g ` (f -` {x} \<inter> A)))"])
```
```  1307   fix x assume "x \<in> A"
```
```  1308   show "(THE x'. x' \<in> (g ` (f -` {f x} \<inter> A))) = g x"
```
```  1309     by (rule theI2[of _ "g x"])
```
```  1310       (insert `x \<in> A`, auto intro!: svi[THEN subvimageD])
```
```  1311 qed
```
```  1312
```
```   (* Image form of subvimage_translator: some h maps f`A onto g`A. *)
```
```  1313 lemma subvimage_translator_image:
```
```  1314   assumes svi: "subvimage A f g"
```
```  1315   shows "\<exists>h. h ` f ` A = g ` A"
```
```  1316 proof -
```
```  1317   from subvimage_translator[OF svi]
```
```  1318   obtain h where "\<And>x. x \<in> A \<Longrightarrow> h (f x) = g x" by auto
```
```  1319   thus ?thesis
```
```  1320     by (auto intro!: exI[of _ h]
```
```  1321       simp: image_compose[symmetric] comp_def cong: image_cong)
```
```  1322 qed
```
```  1323
```
```   (* A coarser partition of a finite image is finite. *)
```
```  1324 lemma subvimage_finite:
```
```  1325   assumes svi: "subvimage A f g" and fin: "finite (f`A)"
```
```  1326   shows "finite (g`A)"
```
```  1327 proof -
```
```  1328   from subvimage_translator_image[OF svi]
```
```  1329   obtain h where "g`A = h`f`A" by fastsimp
```
```  1330   with fin show "finite (g`A)" by simp
```
```  1331 qed
```
```  1332
```
```   (* Each f-fiber is either contained in a g-fiber or disjoint from it. *)
```
```  1333 lemma subvimage_disj:
```
```  1334   assumes svi: "subvimage A f g"
```
```  1335   shows "f -` {x} \<inter> A \<subseteq> g -` {y} \<inter> A \<or>
```
```  1336       f -` {x} \<inter> g -` {y} \<inter> A = {}" (is "?sub \<or> ?dist")
```
```  1337 proof (rule disjCI)
```
```  1338   assume "\<not> ?dist"
```
```  1339   then obtain z where "z \<in> A" and "x = f z" and "y = g z" by auto
```
```  1340   thus "?sub" using svi unfolding subvimage_def by auto
```
```  1341 qed
```
```  1342
```
```   (* Regroup a sum over f`A by the coarser partition induced by g:
```
```      sum over f`A = sum over g-classes of sums over the f-values in each class.
```
```      Works because snd is injective on the Sigma set thanks to the refinement. *)
```
```  1343 lemma setsum_image_split:
```
```  1344   assumes svi: "subvimage A f g" and fin: "finite (f ` A)"
```
```  1345   shows "(\<Sum>x\<in>f`A. h x) = (\<Sum>y\<in>g`A. \<Sum>x\<in>f`(g -` {y} \<inter> A). h x)"
```
```  1346     (is "?lhs = ?rhs")
```
```  1347 proof -
```
```  1348   have "f ` A =
```
```  1349       snd ` (SIGMA x : g ` A. f ` (g -` {x} \<inter> A))"
```
```  1350       (is "_ = snd ` ?SIGMA")
```
```  1351     unfolding image_split_eq_Sigma[symmetric]
```
```  1352     by (simp add: image_compose[symmetric] comp_def)
```
```  1353   moreover
```
```  1354   have snd_inj: "inj_on snd ?SIGMA"
```
```  1355     unfolding image_split_eq_Sigma[symmetric]
```
```  1356     by (auto intro!: inj_onI subvimageD[OF svi])
```
```  1357   ultimately
```
```  1358   have "(\<Sum>x\<in>f`A. h x) = (\<Sum>(x,y)\<in>?SIGMA. h y)"
```
```  1359     by (auto simp: setsum_reindex intro: setsum_cong)
```
```  1360   also have "... = ?rhs"
```
```  1361     using subvimage_finite[OF svi fin] fin
```
```  1362     apply (subst setsum_Sigma[symmetric])
```
```  1363     by (auto intro!: finite_subset[of _ "f`A"])
```
```  1364   finally show ?thesis .
```
```  1365 qed
```
```  1366
```
```   (* If P is a coarsening of X (subvimage), entropy decomposes as
```
```      H(X) = H(P) + H(X|P); the key step regroups the entropy sum over X's
```
```      image by P's partition via setsum_image_split. *)
```
```  1367 lemma (in information_space) entropy_partition:
```
```  1368   assumes sf: "simple_function M X" "simple_function M P"
```
```  1369   assumes svi: "subvimage (space M) X P"
```
```  1370   shows "\<H>(X) = \<H>(P) + \<H>(X|P)"
```
```  1371 proof -
```
```  1372   let "?XP x p" = "joint_distribution X P {(x, p)}"
```
```  1373   let "?X x" = "distribution X {x}"
```
```  1374   let "?P p" = "distribution P {p}"
```
```  1375   note fX = sf(1)[THEN simple_function_imp_finite_random_variable]
```
```  1376   note fP = sf(2)[THEN simple_function_imp_finite_random_variable]
```
```  1377   note finite_distribution_order[OF fX fP, simp]
```
```   (* Regroup the X-entropy sum by P-classes; inside a class the joint
```
```      distribution coincides with X's marginal. *)
```
```  1378   have "(\<Sum>x\<in>X ` space M. ?X x * log b (?X x)) =
```
```  1379     (\<Sum>y\<in>P `space M. \<Sum>x\<in>X ` space M. ?XP x y * log b (?XP x y))"
```
```  1380   proof (subst setsum_image_split[OF svi],
```
```  1381       safe intro!: setsum_mono_zero_cong_left imageI)
```
```  1382     show "finite (X ` space M)" "finite (X ` space M)" "finite (P ` space M)"
```
```  1383       using sf unfolding simple_function_def by auto
```
```  1384   next
```
```  1385     fix p x assume in_space: "p \<in> space M" "x \<in> space M"
```
```  1386     assume "?XP (X x) (P p) * log b (?XP (X x) (P p)) \<noteq> 0"
```
```  1387     hence "(\<lambda>x. (X x, P x)) -` {(X x, P p)} \<inter> space M \<noteq> {}" by (auto simp: distribution_def)
```
```  1388     with svi[unfolded subvimage_def, rule_format, OF `x \<in> space M`]
```
```  1389     show "x \<in> P -` {P p}" by auto
```
```  1390   next
```
```  1391     fix p x assume in_space: "p \<in> space M" "x \<in> space M"
```
```  1392     assume "P x = P p"
```
```  1393     from this[symmetric] svi[unfolded subvimage_def, rule_format, OF `x \<in> space M`]
```
```  1394     have "X -` {X x} \<inter> space M \<subseteq> P -` {P p} \<inter> space M"
```
```  1395       by auto
```
```  1396     hence "(\<lambda>x. (X x, P x)) -` {(X x, P p)} \<inter> space M = X -` {X x} \<inter> space M"
```
```  1397       by auto
```
```  1398     thus "?X (X x) * log b (?X (X x)) = ?XP (X x) (P p) * log b (?XP (X x) (P p))"
```
```  1399       by (auto simp: distribution_def)
```
```  1400   qed
```
```  1401   moreover have "\<And>x y. ?XP x y * log b (?XP x y / ?P y) =
```
```  1402       ?XP x y * log b (?XP x y) - ?XP x y * log b (?P y)"
```
```  1403     by (auto simp add: log_simps zero_less_mult_iff field_simps)
```
```  1404   ultimately show ?thesis
```
```  1405     unfolding sf[THEN entropy_eq] conditional_entropy_eq[OF sf]
```
```  1406     using setsum_joint_distribution_singleton[OF fX fP]
```
```  1407     by (simp add: setsum_cartesian_product' setsum_subtractf setsum_distribution
```
```  1408       setsum_left_distrib[symmetric] setsum_commute[where B="P`space M"])
```
```  1409 qed
```
```  1410
```
```   (* Data-processing inequality for entropy: post-processing by any f cannot
```
```      increase entropy, since X refines f \<circ> X and H(X|f\<circ>X) \<ge> 0. *)
```
```  1411 corollary (in information_space) entropy_data_processing:
```
```  1412   assumes X: "simple_function M X" shows "\<H>(f \<circ> X) \<le> \<H>(X)"
```
```  1413 proof -
```
```  1414   note X
```
```  1415   moreover have fX: "simple_function M (f \<circ> X)" using X by auto
```
```  1416   moreover have "subvimage (space M) X (f \<circ> X)" by auto
```
```  1417   ultimately have "\<H>(X) = \<H>(f\<circ>X) + \<H>(X|f\<circ>X)" by (rule entropy_partition)
```
```  1418   then show "\<H>(f \<circ> X) \<le> \<H>(X)"
```
```  1419     by (auto intro: conditional_entropy_positive[OF X fX])
```
```  1420 qed
```
```  1421
```
```   (* Injective post-processing preserves entropy: combine data processing in
```
```      both directions, using the left inverse the_inv_into for the converse. *)
```
```  1422 corollary (in information_space) entropy_of_inj:
```
```  1423   assumes X: "simple_function M X" and inj: "inj_on f (X`space M)"
```
```  1424   shows "\<H>(f \<circ> X) = \<H>(X)"
```
```  1425 proof (rule antisym)
```
```  1426   show "\<H>(f \<circ> X) \<le> \<H>(X)" using entropy_data_processing[OF X] .
```
```  1427 next
```
```  1428   have sf: "simple_function M (f \<circ> X)"
```
```  1429     using X by auto
```
```  1430   have "\<H>(X) = \<H>(the_inv_into (X`space M) f \<circ> (f \<circ> X))"
```
```  1431     by (auto intro!: mutual_information_cong simp: entropy_def the_inv_into_f_f[OF inj])
```
```  1432   also have "... \<le> \<H>(f \<circ> X)"
```
```  1433     using entropy_data_processing[OF sf] .
```
```  1434   finally show "\<H>(X) \<le> \<H>(f \<circ> X)" .
```
```  1435 qed
```
```  1436
```
```  1437 end
```