# Reference: https://blog.csdn.net/deephub/article/details/113806330
import numpy as np
from sklearn import metrics
from sklearn.metrics import roc_auc_score
# import precisionplt
def calculate_TP(y, y_pred):
    """Count true positives: samples whose label and prediction are both 1."""
    return sum(1 for true, pred in zip(y, y_pred) if true == 1 and pred == 1)
def calculate_TN(y, y_pred):
    """Count true negatives: samples whose label and prediction are both 0."""
    return sum(1 for true, pred in zip(y, y_pred) if true == 0 and pred == 0)
def calculate_FP(y, y_pred):
    """Count false positives: samples labeled 0 but predicted as 1."""
    return sum(1 for true, pred in zip(y, y_pred) if true == 0 and pred == 1)
def calculate_FN(y, y_pred):
    """Count false negatives: samples labeled 1 but predicted as 0."""
    return sum(1 for true, pred in zip(y, y_pred) if true == 1 and pred == 0)
# Accuracy via sklearn — should match the hand-rolled calculate_accuracy below.
def calculate_accuracy_sklearn(y, y_pred):
    """Return accuracy_score(y, y_pred) computed by scikit-learn."""
    score = metrics.accuracy_score(y, y_pred)
    return score
# Hand-rolled accuracy; should agree with calculate_accuracy_sklearn above.
def calculate_accuracy(y, y_pred):
    """Accuracy = (TP + TN) / (TP + TN + FP + FN).

    Returns 0.0 for empty inputs instead of raising ZeroDivisionError.
    """
    tp = calculate_TP(y, y_pred)
    tn = calculate_TN(y, y_pred)
    fp = calculate_FP(y, y_pred)
    fn = calculate_FN(y, y_pred)
    total = tp + tn + fp + fn
    return (tp + tn) / total if total else 0.0
# Precision
def calculate_precision(y, y_pred):
    """Precision = TP / (TP + FP).

    Returns 0.0 when nothing was predicted positive (TP + FP == 0)
    instead of raising ZeroDivisionError.
    """
    tp = calculate_TP(y, y_pred)
    fp = calculate_FP(y, y_pred)
    denom = tp + fp
    return tp / denom if denom else 0.0
# Recall (same as TPR): fraction of actual positives predicted correctly.
def calculate_recall(y, y_pred):
    """Recall = TP / (TP + FN).

    Returns 0.0 when there are no actual positives (TP + FN == 0)
    instead of raising ZeroDivisionError.
    """
    tp = calculate_TP(y, y_pred)
    fn = calculate_FN(y, y_pred)
    denom = tp + fn
    return tp / denom if denom else 0.0
def precision_recall_curve(y, y_pred):
    """Compute precision and recall at a fixed set of probability thresholds.

    y_pred holds the predicted probability of class 1 for each sample; at
    each threshold it is binarized (prob >= threshold -> 1) and precision
    and recall are computed against y.

    Returns (recall, precision) as two lists, one entry per threshold.
    """
    precision, recall = [], []
    thresholds = [0.1, 0.2, 0.3, 0.6, 0.65]
    for thresh in thresholds:
        # Bug fix: the class list must be rebuilt per threshold — previously
        # predictions accumulated across thresholds, corrupting every metric
        # after the first one.
        y_pred_class = [1 if prob >= thresh else 0 for prob in y_pred]
        precision.append(calculate_precision(y, y_pred_class))
        recall.append(calculate_recall(y, y_pred_class))
    # Bug fix: the original returned `precisionplt.plot(...)` but the
    # `precisionplt` import is commented out, so it raised NameError.
    return recall, precision
# F1 score: combines Precision and Recall into a single number (their
# harmonic mean), making it easy to compare models directly.
def calculate_F1(y, y_pred):
    """F1 = 2 * P * R / (P + R).

    Returns 0.0 when both precision and recall are 0 instead of raising
    ZeroDivisionError.
    """
    p = calculate_precision(y, y_pred)
    r = calculate_recall(y, y_pred)
    denom = p + r
    return 2 * p * r / denom if denom else 0.0
# AUC-ROC is one of the most common evaluation metrics for binary
# classification: the ROC curve plots TPR (y-axis) against FPR (x-axis),
# and the closer the area under it (AUC) is to 1, the better the model.
def roc_auc(y, y_pred):
    """Return sklearn's ROC AUC score for labels y and scores y_pred."""
    auc_score = roc_auc_score(y, y_pred)
    return auc_score
# False positive rate: of all actual negatives, how many were (wrongly)
# predicted as positive.
def FPR(y, y_pred):
    """FPR = FP / (FP + TN).

    Returns 0.0 when there are no actual negatives (FP + TN == 0)
    instead of raising ZeroDivisionError.  The original also computed
    TP and FN here but never used them; those dead locals are removed.
    """
    fp = calculate_FP(y, y_pred)
    tn = calculate_TN(y, y_pred)
    denom = fp + tn
    return fp / denom if denom else 0.0
# False negative rate: of all actual positives, how many were (wrongly)
# predicted as negative.
def FNR(y, y_pred):
    """FNR = FN / (FN + TP).

    Returns 0.0 when there are no actual positives (FN + TP == 0)
    instead of raising ZeroDivisionError.
    """
    tp = calculate_TP(y, y_pred)
    fn = calculate_FN(y, y_pred)
    denom = fn + tp
    return fn / denom if denom else 0.0
# TNR = TN / (FP + TN) — specificity.
def TNR(y, y_pred):
    """TNR (specificity) = TN / (FP + TN).

    Returns 0.0 when there are no actual negatives (FP + TN == 0)
    instead of raising ZeroDivisionError.
    """
    tn = calculate_TN(y, y_pred)
    fp = calculate_FP(y, y_pred)
    denom = fp + tn
    return tn / denom if denom else 0.0
# TPR = TP / (TP + FN) — sensitivity; identical to recall.
def TPR(y, y_pred):
    """TPR (sensitivity / recall) = TP / (TP + FN).

    Returns 0.0 when there are no actual positives (TP + FN == 0)
    instead of raising ZeroDivisionError.
    """
    tp = calculate_TP(y, y_pred)
    fn = calculate_FN(y, y_pred)
    denom = fn + tp
    return tp / denom if denom else 0.0
# Metrics reported: Recall, F1_Score, precision, FPR (false positive rate),
# FNR (false negative rate), AUC, accuracy, TPR (sensitivity), TNR
# (specificity).
y = []
y_pred = []
pred_file = "/home/syy/code/PaddleClas/school_pre/pred3.txt"
# Each line is expected to hold two whitespace-separated integers:
# "<true_label> <predicted_label>".
# Use a context manager so the file is closed even on error (the original
# opened the file and never closed it).
with open(pred_file) as f:
    for line in f:
        parts = line.strip().split()
        y.append(int(parts[0]))
        y_pred.append(int(parts[1]))
print(y)
print(y_pred)
# Store results in new variables instead of rebinding the metric functions
# themselves (the original did `FPR = FPR(...)` etc., which destroys the
# function after one use).
recall_val = calculate_recall(y, y_pred)
precision_val = calculate_precision(y, y_pred)
f1_val = calculate_F1(y, y_pred)
fpr_val = FPR(y, y_pred)
fnr_val = FNR(y, y_pred)
auc = roc_auc(y, y_pred)
accuracy = calculate_accuracy_sklearn(y, y_pred)
tpr_val = TPR(y, y_pred)
tnr_val = TNR(y, y_pred)
print("Recall", round(recall_val, 4))
print("precision", round(precision_val, 4))
print("F1_Score", round(f1_val, 4))
print("FPR", round(fpr_val, 4))
print("FNR", round(fnr_val, 4))
print("auc", round(auc, 4))
print("accuracy", round(accuracy, 4))
print("TPR", round(tpr_val, 4))
print("TNR", round(tnr_val, 4))