import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
import torch
import numexpr as en
sns.set()
X = np.array([1, 0, 1, 1, 0])
Y = np.array([1, 1, 1, 0, 0])


def checkType(X):
    """Coerce array-likes to a FloatTensor; pass torch tensors through."""
    if isinstance(X, (np.ndarray, list, tuple)):
        return torch.FloatTensor(X)
    elif isinstance(X, torch.Tensor):  # torch.TensorType / torch.FloatType are not valid isinstance targets
        return X
    else:
        raise TypeError("Expected ndarray, list, tuple, or torch.Tensor")

def NumpyProb(X, symbol, x):
    """Empirical P(X <symbol> x), e.g. P(X == x), evaluated via numexpr."""
    n = X.size
    Lambda = "{}{}{}".format("X", symbol, "x")
    expr = en.evaluate(Lambda)  # numexpr resolves X and x from the local scope
    return expr.dot(np.ones(n)) / n
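
# Sanity check (a sketch): with X = [1, 0, 1, 1, 0], P(X == 1) is 3/5.
assert abs(NumpyProb(X, '==', 1) - 0.6) < 1e-12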

def NumpyEntropy(X):
    """Shannon entropy H(X) in bits from the empirical distribution."""
    NE = 0
    for x in np.unique(X):
        PX = NumpyProb(X, '==', x)  # np.unique guarantees PX > 0
        NE += -PX * np.log2(PX)
    return NE


NumpyEntropy(X)  # ≈ 0.9710 bits for the sample X
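
# An equivalent vectorized form (a sketch, not part of the original listing):
# np.unique(return_counts=True) yields the empirical distribution directly,
# avoiding one numexpr call per symbol.
def NumpyEntropyVectorized(X):
    _, counts = np.unique(X, return_counts=True)
    p = counts / X.size
    return -(p * np.log2(p)).sum()

assert abs(NumpyEntropyVectorized(X) - NumpyEntropy(X)) < 1e-9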

def TorchEntropy(X):
    """Shannon entropy H(X) in bits, computed with torch ops."""
    NE = 0
    init_X = checkType(X)
    m = float(init_X.size(0))
    for x in torch.unique(init_X):
        PX = (init_X == x).sum().float() / m
        NE += -PX * torch.log2(PX)  # torch.log2, not np.log2, on a tensor
    return NE


TorchEntropy(X)  # ≈ 0.9710 bits
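
# Cross-check (a sketch): both backends should agree to float32 precision.
assert abs(float(TorchEntropy(X)) - NumpyEntropy(X)) < 1e-5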

def NumpyJointEntropy(X, Y):
    """Joint entropy H(X, Y) in bits over the observed symbol pairs."""
    init_XY = np.vstack((X, Y)).T
    m, n = init_XY.shape
    enumerate_ = np.array([(x, y) for x in np.unique(X) for y in np.unique(Y)])
    temp = np.array([((init_XY == e).dot(np.ones(n)) == n).dot(np.ones(m)) / m
                     for e in enumerate_])
    temp = temp[temp > 0]  # drop unobserved pairs so log2 is defined
    return (-temp * np.log2(temp)).sum()


NumpyJointEntropy(X, Y)  # ≈ 1.9219 bits for the sample data
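
# Hand check (a sketch): the observed pair frequencies are
# P(0,0) = P(0,1) = P(1,0) = 1/5 and P(1,1) = 2/5, so
# H(X, Y) = -(3 * 0.2 * log2(0.2) + 0.4 * log2(0.4)) ≈ 1.9219 bits.
assert abs(NumpyJointEntropy(X, Y) - 1.9219) < 1e-3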

def TorchJointEntropy(X, Y):
    """Joint entropy H(X, Y) in bits, computed with torch ops."""
    m = len(X)  # works for arrays, lists, and tensors alike
    comb = lambda SET: np.array([[i, j] for i in SET[::-1] for j in SET])
    element = checkType(comb(np.union1d(np.unique(X), np.unique(Y))))
    XY = torch.cat([checkType(X)[:, None], checkType(Y)[:, None]], 1)
    PXY = torch.stack([(((XY == e).float()).mm(torch.ones((2, 1))) == 2).sum().float() / m
                       for e in element])
    PXY = PXY[PXY > 0]  # drop unobserved pairs so log2 is defined
    return (-PXY * torch.log2(PXY)).sum()


TorchJointEntropy(X, Y)  # ≈ 1.9219 bits
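
# Cross-check (a sketch): NumPy and torch joint entropies should agree
# to float32 precision.
assert abs(float(TorchJointEntropy(X, Y)) - NumpyJointEntropy(X, Y)) < 1e-4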

def MutualInfo(X, Y, keyelement="X<->Y", keyType='numpy'):
    """Entropy differences; "X<->Y" selects the mutual information
    I(X; Y) = H(X) + H(Y) - H(X, Y)."""
    if keyType == 'numpy':
        HX, HY, HXY = NumpyEntropy(X), NumpyEntropy(Y), NumpyJointEntropy(X, Y)
    elif keyType == "torch":
        HX, HY, HXY = TorchEntropy(X), TorchEntropy(Y), TorchJointEntropy(X, Y)
    return {"X<-Y": HX - HXY, "X->Y": HY - HXY, "X<->Y": HX + HY - HXY}[keyelement]
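
# Usage (a sketch): for the sample data H(X) = H(Y) ≈ 0.9710 and
# H(X, Y) ≈ 1.9219, so I(X; Y) = H(X) + H(Y) - H(X, Y) ≈ 0.0200 bits.
print(MutualInfo(X, Y, keyelement="X<->Y", keyType='numpy'))
print(MutualInfo(X, Y, keyelement="X<->Y", keyType='torch'))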