# info_theory.py
# Functions for calculating information-theoretic quantities:
#     entropy_X(P)
#     entropy_X_Y(P)
#     entropy_XY_Z(P)
#     mutual_info_XY_Z(P)
import numpy as np
def entropy_X(P):
    """Return the entropy H(X) in bits. P must sum to 1."""
    P = np.asarray(P, dtype=float)
    # Mask zero entries so that 0 * log2(0) contributes 0 rather than NaN.
    P = np.ma.masked_array(P, P == 0)
    # np.sum on a masked array skips masked entries; a plain np.inner would
    # ignore the mask and let the log2(0) entries poison the result.
    return -np.sum(P * np.log2(P))
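
# Usage sketch (the `_demo_entropy_X` helper below is illustrative only, not
# part of the original module): a fair coin carries exactly one bit of
# entropy, while a biased coin is more predictable and carries less.
def _demo_entropy_X():
    print(entropy_X([0.5, 0.5]))  # 1.0 bit: maximal uncertainty for two outcomes
    print(entropy_X([0.9, 0.1]))  # ~0.469 bits: the outcome is mostly predictable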
def entropy_X_Y(P):
    """
    Calculate the conditional entropy H(X|Y) in bits, with joint
    probabilities p(x,y) = P[x,y].
    """
    P = np.asarray(P, dtype=float)
    P = np.ma.masked_array(P, P == 0)
    dim = np.shape(P)
    # Marginal p(y): sum x out along axis 0.
    P_Y = np.sum(P, 0)
    # H(X|Y) = -sum_{x,y} p(x,y) * log2( p(x,y) / p(y) )
    return -np.sum(P * np.log2(P / np.tile(P_Y, (dim[0], 1))))
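
# Usage sketch (the `_demo_entropy_X_Y` helper is illustrative only): when X
# and Y are independent, conditioning on Y tells us nothing about X, so
# H(X|Y) = H(X).
def _demo_entropy_X_Y():
    P = np.outer([0.5, 0.5], [0.25, 0.75])  # joint p(x,y) of independent X, Y
    print(entropy_X_Y(P))  # 1.0 bit, the same as entropy_X([0.5, 0.5])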
def entropy_XY_Z(P):
    """
    Calculate the conditional entropy H(X,Y|Z) in bits, with joint
    probabilities p(x,y,z) = P[x,y,z].
    """
    P = np.asarray(P, dtype=float)
    P = np.ma.masked_array(P, P == 0)
    dim = np.shape(P)
    # Marginal p(z): sum x and y out along the first two axes.
    P_Z = np.sum(np.sum(P, 0), 0)
    # H(X,Y|Z) = -sum_{x,y,z} p(x,y,z) * log2( p(x,y,z) / p(z) )
    return -np.sum(P * np.log2(P / np.tile(P_Z, (dim[0], dim[1], 1))))
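
# Usage sketch (the `_demo_entropy_XY_Z` helper is illustrative only): for
# three independent fair bits, conditioning on Z changes nothing, so
# H(X,Y|Z) = H(X,Y) = 2 bits.
def _demo_entropy_XY_Z():
    P = np.full((2, 2, 2), 0.125)  # uniform joint p(x,y,z) over 8 outcomes
    print(entropy_XY_Z(P))  # 2.0 bits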
def mutual_info_XY_Z(P):
    """
    Calculate the conditional mutual information I(X;Y|Z) in bits, with
    joint probabilities p(x,y,z) = P[x,y,z].
    """
    # I(X;Y|Z) = H(X|Z) + H(Y|Z) - H(X,Y|Z).
    # Summing axis 1 (resp. 0) out of P gives the joint p(x,z) (resp. p(y,z)).
    H_X_Z = entropy_X_Y(np.sum(P, 1))
    H_Y_Z = entropy_X_Y(np.sum(P, 0))
    H_XY_Z = entropy_XY_Z(P)
    # print(H_X_Z, H_Y_Z, H_XY_Z)
    return H_X_Z + H_Y_Z - H_XY_Z
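
# Usage sketch (the `_demo_mutual_info_XY_Z` helper is illustrative only): if
# Y is an exact copy of X and both are independent of Z, then observing Y
# reveals X completely, so I(X;Y|Z) = H(X|Z) = 1 bit.
def _demo_mutual_info_XY_Z():
    P = np.zeros((2, 2, 2))
    P[0, 0, :] = 0.25  # x = y = 0, for either value of z
    P[1, 1, :] = 0.25  # x = y = 1, for either value of z
    print(mutual_info_XY_Z(P))  # 1.0 bit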
if __name__ == '__main__':
    # Run the unit tests when executed directly.
    import unittest
    from tests.test_info_theory import *
    unittest.main()