Pith — machine review for the scientific record. Sign in
theorem

max_entropy_uniform

proved
show as:
view math explainer →
module
IndisputableMonolith.Information.ShannonEntropy
domain
Information
line
92 · github
papers citing
none yet

open explainer

Read the cached plain-language explainer.

open lean source

IndisputableMonolith.Information.ShannonEntropy on GitHub at line 92.

browse module

Browse all declarations in this module on Recognition.

explainer page

A cached Ask Recognition explainer exists for this declaration.

open explainer

depends on

used by

formal source

  89  · simp [h]
  90
/-- **THEOREM**: Maximum entropy is `log n` for the uniform distribution on `n` outcomes.

For `uniform n hn` every outcome has probability `1/n`, so
`H = -∑_{i < n} (1/n) · log (1/n) = -(n · (1/n) · log (1/n)) = log n`. -/
theorem max_entropy_uniform (n : ℕ) (hn : n > 0) :
    shannonEntropy (uniform n hn) = Real.log n := by
  -- H = -n × (1/n) × log(1/n) = -log(1/n) = log(n)
  unfold shannonEntropy uniform
  simp only
  -- Cast the positivity of `n` into ℝ so it can feed the `Real.log` lemmas below.
  have hn_pos : (0 : ℝ) < n := Nat.cast_pos.mpr hn
  have hn_ne : (n : ℝ) ≠ 0 := ne_of_gt hn_pos
  have h_prob_pos : (1 : ℝ) / n > 0 := by positivity
  -- `h_prob_pos` discharges the `if`-guard in the entropy definition, and the
  -- `n` identical summands collapse to `n • (…)`, rewritten as multiplication.
  simp only [h_prob_pos, ↓reduceIte, Finset.sum_const, Finset.card_fin, nsmul_eq_mul]
  -- Goal: -(n * (1/n * log(1/n))) = log(n)
  -- log(1/n) = log 1 - log n = -log n
  have h_log : Real.log (1 / n) = -Real.log n := by
    rw [Real.log_div (by norm_num : (1 : ℝ) ≠ 0) hn_ne, Real.log_one, zero_sub]
  rw [h_log]
  -- n * (1/n * -log n) cancels to -log n (needs n ≠ 0, supplied by `field_simp`'s context).
  have h_simp : (n : ℝ) * (1 / n * -Real.log n) = -Real.log n := by
    field_simp
  rw [h_simp]
  ring
 109
/-- **THEOREM**: Entropy is 0 for a deterministic (point-mass) distribution:
if outcome `i` has probability 1 and every other outcome has probability 0,
then every summand of the entropy vanishes, so the total is 0. -/
theorem zero_entropy_deterministic {n : ℕ} (d : ProbDist n) (i : Fin n)
    (hdet : d.probs i = 1) (hother : ∀ j ≠ i, d.probs j = 0) :
    shannonEntropy d = 0 := by
  unfold shannonEntropy
  -- Reduce `-(∑ …) = 0` to `∑ … = 0`.
  simp only [neg_eq_zero]
  -- It suffices to show every term of the sum is zero.
  apply Finset.sum_eq_zero
  intro j _
  by_cases heq : j = i
  -- At `i`: the term is `1 * log 1 = 0`.
  · simp [heq, hdet, Real.log_one]
  -- Elsewhere: probability is 0, so the term vanishes (presumably via the
  -- zero-probability guard in `shannonEntropy`'s definition — confirm there).
  · simp [hother j heq]
 121
 122/-! ## J-Cost Connection -/