# Entropy measures the uncertainty of a random variable: the lower the
# entropy, the less uncertainty in the class labels.
# Note: posted because the small helper functions here took a long time
# to track down in R.
# Compute the Shannon entropy of the "labels" column of a data set.
#
# H = -sum(p_i * log2(p_i)) over the class proportions p_i, where p_i is
# the fraction of rows whose "labels" value equals class i.
#
# Args:
#   dataSet: a matrix or data.frame containing a column named "labels".
#
# Returns:
#   A single non-negative numeric value, the Shannon entropy. Returns 0
#   for an empty data set.
CalShannonEnt <- function(dataSet) {
  # number of samples
  numEntries <- nrow(dataSet)
  if (is.null(numEntries) || numEntries == 0) {
    # No rows: entropy of an empty distribution is defined here as 0.
    # (The original 1:numEntries loop would have mis-iterated on 0 rows.)
    return(0)
  }
  # table() counts each distinct label in one vectorized pass, replacing
  # the original per-row counting loop (whose %in% check was a no-op,
  # since the label set was derived from the same column).
  labelCount <- table(dataSet[, "labels"])
  # Class proportions.
  p <- as.numeric(labelCount) / numEntries
  # Drop zero-probability classes: 0 * log2(0) is NaN in R, but the
  # entropy contribution of an empty class is 0 by convention.
  p <- p[p > 0]
  -sum(p * log2(p))
}
# Example: 5-sample toy data set (two binary features + a class label),
# the classic decision-tree entropy example.
dataSet <- matrix(c(1, 1, "yes",
                    1, 1, "yes",
                    1, 0, "no",
                    1, 0, "no",
                    0, 1, "no"),
                  byrow = TRUE,  # TRUE, not the reassignable alias T
                  nrow = 5)
colnames(dataSet)[3] <- "labels"
# Bug fix: the function is defined as CalShannonEnt (capital C); the
# original call to calShannonEnt would fail with
# "could not find function".
s <- CalShannonEnt(dataSet)