f <- function(x)
{
  # Shannon entropy (natural log) of a probability vector.
  # Zero-probability entries are dropped, i.e. 0 * log(0) is treated as 0.
  x <- x[x != 0]  # logical subsetting; which() was redundant
  -sum(x * log(x))  # last expression is the return value; explicit return() not needed
}
information_entropy <- function(A)
{
  # Marginal and joint Shannon entropies (natural log) of a joint
  # probability matrix A, where rows index x and columns index y.
  # Returns a named list: H(x) per row, H(y) per column, and joint H(x, y).
  #
  # NOTE: the original computed apply(A, 1, f) twice with the results
  # discarded (dead code, removed), and labelled both marginal entries
  # "x的信息熵"; the column-wise entry is now correctly labelled for y.
  p <- as.vector(A)
  p <- p[p != 0]  # drop zero cells so 0 * log(0) is treated as 0
  joint <- -sum(p * log(p))
  list("x的信息熵" = apply(A, 1, f),
       "y的信息熵" = apply(A, 2, f),
       "x,y的联合信息熵" = joint)
}
# Example: a 2x2 matrix with every cell equal to 1/2, passed through
# information_entropy() to print the row, column, and joint entropies.
A <- matrix(1/2, nrow = 2, ncol = 2)
information_entropy(A)
# Problem: given the joint probability distribution of x and y, compute the information entropy.
# (Page footer from the source article, published 2023-03-23 21:49:55 — not part of the script.)