probJCost (definition)

Formal source: IndisputableMonolith.Information.ShannonEntropy, on GitHub at line 126.

/-- The J-cost of a probability (relative to uniform).
    For p ∈ (0,1], this measures how "surprising" the probability is. -/
noncomputable def probJCost (p : ℝ) (hp : p > 0) (hp1 : p ≤ 1) : ℝ :=
  -Real.log p

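For concreteness, a fair coin outcome has p = 1/2, so its J-cost is -log(1/2) = log 2 ≈ 0.693 nats (one bit). The arithmetic can be checked standalone; this sketch is not part of the module and uses only the same Mathlib lemmas as jcost_uniform below:

-- Standalone check (not in the source): the J-cost of p = 1/2 is log 2.
example : -Real.log (1 / (2 : ℝ)) = Real.log 2 := by
  rw [one_div, Real.log_inv, neg_neg]
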
/-- **THEOREM**: J-cost of uniform probability is log(n).
    -log(1/n) = log(n) -/
theorem jcost_uniform (n : ℕ) (hn : n > 0) :
    -Real.log (1 / (n : ℝ)) = Real.log n := by
  rw [one_div, Real.log_inv, neg_neg]

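Applied at a concrete size: a uniform choice among 8 alternatives costs log 8 = 3 log 2. A usage sketch (not in the source; the positivity side condition is discharged by norm_num):

-- Usage sketch (hypothetical, assumes the theorem above): instantiate at n = 8.
example : -Real.log (1 / ((8 : ℕ) : ℝ)) = Real.log ((8 : ℕ) : ℝ) :=
  jcost_uniform 8 (by norm_num)
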
/-- The total J-cost of a distribution.
    This equals the Shannon entropy! -/
noncomputable def totalJCost {n : ℕ} (d : ProbDist n) : ℝ :=
  Finset.univ.sum fun i =>
    if h : d.probs i > 0 then
      d.probs i * probJCost (d.probs i) h (by
        have := d.normalized
        have hs := Finset.single_le_sum (fun j _ => d.nonneg j) (Finset.mem_univ i)
        simp at hs
        linarith)
    else 0

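Worked example, assuming a two-point ProbDist with both probabilities 1/2: totalJCost = 1/2 × (-log(1/2)) + 1/2 × (-log(1/2)) = log 2, exactly the Shannon entropy of a fair coin. The theorem below makes this general; the pointwise identity it rests on, -(p × log p) = p × (-log p), can be checked in isolation (a minimal sketch, not part of the module; ring closes it):

-- Standalone check of the key sign-shuffling identity used in the proof below.
example (p : ℝ) : -(p * Real.log p) = p * (-Real.log p) := by ring
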
/-- **THEOREM (Shannon = J-Cost)**: Shannon entropy equals total J-cost.
    This is the key connection! -/
theorem shannon_equals_jcost {n : ℕ} (d : ProbDist n) :
    shannonEntropy d = totalJCost d := by
  -- Both compute Σ p_i × (-log p_i), just with different notation
  -- shannonEntropy = -(Σ p*log(p)) and totalJCost = Σ p*(-log(p))
  -- These are equal since -(p*log(p)) = p*(-log(p))
  unfold shannonEntropy totalJCost probJCost
  -- Goal: -(Σ if p>0 then p*log(p) else 0) = Σ if p>0 then p*(-log(p)) else 0
  conv_lhs =>