# 2020-11-22  Logistic regression, multi-class (one-vs-rest), v1.1

import math
import torch
import numpy as np

def sigmoid(x):
    '''
    Logistic function for a single scalar.

    The argument is clamped to [-700, 700] first so that math.exp
    never overflows (exp overflows just above x = 709).
    '''
    value = x
    if value > 700:
        value = 700
    if value < -700:
        value = -700
    return 1 / (1 + math.exp(-value))

def mul_dui_ying(x, y):
    '''
    Element-wise (Hadamard) product of two array-likes.

    Both inputs are converted to float tensors and multiplied with
    normal torch broadcasting rules; returns a float torch.Tensor.

    (The original implementation round-tripped each input through
    tensor -> numpy -> list -> tensor for no effect; converting
    directly is equivalent and avoids the sibling helper.)
    '''
    a = torch.as_tensor(x).float()
    b = torch.as_tensor(y).float()
    return a.mul(b)


def mul_1(a, b):
    '''
    Dot product of two 1-D vectors: a0*b0 + a1*b1 + ... + an*bn.

    Returns a 1x1 float tensor (the matrix-multiply formulation is
    kept so callers that index the result, e.g. mul_1(...)[0], keep
    working).

    (The original implementation round-tripped each input through
    numpy lists before rebuilding a tensor; reshaping to a row
    vector directly is equivalent and self-contained.)
    '''
    row_a = torch.as_tensor(a).float().reshape(1, -1)
    row_b = torch.as_tensor(b).float().reshape(1, -1)
    return row_a.mm(row_b.t())

def tensor_to_list(x):
    '''
    Convert a tensor (or anything numpy can wrap) into a plain
    Python list of its first-axis elements.
    '''
    return [element for element in np.array(x)]

def tensorSS_to_list(a):
    '''
    Convert a 2-D tensor into a list of row lists.
    '''
    return [list(row) for row in np.array(a)]

def list_zhang_liang(n, m):
    '''Build an n x m tensor of uniform random values in [0, 1) and return it as a list of row lists.'''
    return [list(row) for row in np.array(torch.rand(n, m))]



# [File helpers]
def addwrite(filename, strr):
    '''Append str(strr) to the given UTF-8 file, creating it if missing.'''
    with open(filename, 'a+', encoding='utf-8') as handle:
        handle.write(str(strr))
def cleanfile(filename):
    '''Truncate the given file to empty (opening in 'w' mode clears it).'''
    with open(filename, 'w', encoding='utf-8') as _handle:
        pass  # opening in write mode already emptied the file
    
# [Misc]
def show_in(x):
    '''Print every element of the container, one per line.'''
    for element in x:
        print(element)
def Logistic_Regression__in_list_out_list(i,xx,yy,aefa,xita,pinggu_qian,pinggu_xian,max_aefa,fast_break):
    '''
    Perform ONE batch-gradient-descent step of binary logistic regression.

    Parameter names are pinyin transliterations:
      i           -- current iteration index (only echoed back in the log entry)
      xx          -- data set: list of feature rows (column 0 is the bias, fixed to 1 by the caller)
      yy          -- list of 0/1 labels, one per row of xx
      aefa        -- ("alpha") current learning rate
      xita        -- ("theta") current weight vector, as a list
      pinggu_qian -- ("score before") likelihood score carried over from the previous call
      pinggu_xian -- ("score now") likelihood score of the current weights
      max_aefa    -- upper bound for the self-adjusting learning rate
      fast_break  -- [last_score, repeat_count]; repeat_count grows while the score
                     stays (almost) unchanged so the caller can early-stop

    Returns (xx, yy, aefa, new_theta_list, score_before, score_now, log, fast_break)
    where log = [i, theta_list, score_now, aefa].  The caller threads every
    returned value straight back into the next call.
    '''
    # rebuild tensors from the list-typed inputs
    x=torch.tensor(xx)
    y=torch.tensor(yy)


    xita=torch.tensor(xita)
    aefa=aefa
    pinggu_qian=pinggu_qian
    pinggu_xian=pinggu_xian

    log=[]
#     if 1:
#     try:
    sum=0 # gradient accumulator (NOTE(review): shadows the builtin `sum`)
    for t in range(len(x)):# accumulate the full-batch gradient
        sum+=x[t]*(sigmoid(mul_1(xita,x[t]))-y[t])# d/d(theta) of the negative log-likelihood for row t

    xita=xita-aefa*sum

# [evaluation: likelihood of the data under the updated weights]
    pinggu_qian=pinggu_xian
    pinggu=1
    for t in range(len(x)):
        pinggu=pinggu* (sigmoid(mul_1(xita,x[t]))**y[t])*(1-sigmoid(mul_1(xita,x[t])))**(1-y[t]) 
    pinggu_xian=pinggu  

# [self-adjusting learning rate]  
    deerta=pinggu_xian-pinggu_qian
   # [ "deerta" (delta) = difference between the two successive scores ]       
    if deerta<=0:
        aefa=0.7*aefa        # score got worse: shrink the step
    if deerta>0:
        if deerta<0.000001:
            aefa= aefa*1.42  # tiny improvement: grow the step
    if aefa>max_aefa:
        aefa=max_aefa
    po=tensor_to_list(xita)
# [output]
    q2=po
#         print(float(pinggu_xian))
    q3=float(pinggu_xian)
#         q3=(pinggu_xian)
#         q3=[str('%.07f'%i) for i in q3]
    log.append([i,q2,q3,aefa])
#                 print(times+1," | theta vector:",po," | score:",tensor_to_list(pinggu_xian),"| score gain between steps:",tensor_to_list(deerta)," |  \n") 
    log=log[0]
#     wrong=0
#     except:
#         wrong=1
#         pass
#         return xx,yy,aefa,tensor_to_list(xita),float(pinggu_qian),float(pinggu_xian),log
    
    # <fast_break>: count how many consecutive steps the score has been (almost) flat
    if abs(fast_break[0]-float(pinggu_qian))<=0.0000000001:
        fast_break[1]=fast_break[1]+1
    else:
#         if i%1000==0:
#             print(fast_break[0],float(pinggu_qian))
        fast_break[0]=float(pinggu_qian)
        fast_break[1]=1
        
    return xx,yy,aefa,tensor_to_list(xita),float(pinggu_qian),float(pinggu_xian),log,fast_break

# Auto-generate 5 classes of data, 1000 rows, for training
KUAN=5   # ("kuan" = width) number of features, also used as the number of classes
LEN=1000 # number of samples
# draw a random feature matrix
X=list_zhang_liang(LEN,KUAN);
for i in range(len(X)):
    X[i][0]=1  # column 0 is the bias feature, fixed at 1
show_in(X)
x=X
Y=list_zhang_liang(LEN,1);
Y=[int(i[0]*1000%KUAN) for i in Y]  # map uniform floats to class ids 0..KUAN-1
show_in(Y)
# build the one-hot label matrix new_y
new_y=tensorSS_to_list(torch.zeros(len(Y),KUAN).long())
new_y  # (notebook cell echo; no effect when run as a script)
# set the 1 in each sample's class column
for i in range(len(Y)):
    new_y[i][Y[i]]=1
new_y  # (notebook cell echo; no effect when run as a script)
# slice new_y column-wise: yy[k] is the binary label vector for class k (one-vs-rest)
yy=[]
for lie in range(KUAN):   # "lie" = column
    temp=[]
    for data in range(len(new_y)):
        temp.append(new_y[data][lie])
    yy.append(temp)
yy  # (notebook cell echo; no effect when run as a script)
# Train one binary classifier per class (one-vs-rest); keep the last log entry of each
logs=[]
for y in yy:
    # initialise theta randomly: one weight per feature column of x
    xita = (tensor_to_list(torch.rand(1,list(torch.tensor(x).shape)[1]))[0])
    aefa=1            # initial learning rate
    pinggu_qian=0     # previous likelihood score
    pinggu_xian=0     # current likelihood score
    fastbreak=[0,1]   # [last score, consecutive-flat-score counter]
    for i in range(5000):
        x,y,aefa,xita,pinggu_qian,pinggu_xian,log,fastbreak=Logistic_Regression__in_list_out_list(i,x,y,aefa,xita,pinggu_qian,
                                                                                                   pinggu_xian,max_aefa=1,
                                                                                                   fast_break=fastbreak
                                                                                                  )
        if fastbreak[1]>=30:  # early stop: score unchanged for 30 consecutive steps
            break
        if i%500==0:
            print(log)
    print(log) 
    logs.append(log)  # log = [iteration, theta, score, learning rate]
# Evaluate: score every sample under each per-class model, then pick the best class
y_yuce=[]  # ("yuce" = prediction) per-class predicted probabilities, one list per class
rate=[]    # per-class recall on the positive samples
geshu=[]   # ("geshu" = count) number of positive samples per class
for ii in range(len(logs)):
    count=0
    yuce=[]        # hard 0/1 predictions (threshold 0.5)
    yuceefloat=[]  # soft predictions rounded to 1 decimal
    for i in x:
#         print(i,logs[ii][1])
        yuce.append(int(sigmoid(tensor_to_list(mul_1(i,logs[ii][1])[0])[0])+0.5))
        yuceefloat.append(round(sigmoid(tensor_to_list(mul_1(i,logs[ii][1])[0])[0]),1))
    y_yuce.append(yuceefloat)
    ww=0  # wrong on positives
    rr=0  # right on positives
    for t in range(len(yy[ii])):
        if yy[ii][t]==1:
            count+=1
            if yy[ii][t]==yuce[t]:
                rr+=1
            else:ww+=1
    geshu.append(count)     
    print(ii,"组查全率:",rr/(rr+ww)*100,"%")  # prints "<class> recall: <pct> %"
    rate.append(rr/(rr+ww))
# transpose so rows are samples, columns are classes
yyyy_yuce = (torch.tensor(y_yuce).t())
yyyy_yuce  # (notebook cell echo; no effect when run as a script)
yyyy_yuce_anti=(1-(torch.tensor(y_yuce).t()))
yyyy_yuce_anti  # (notebook cell echo; no effect when run as a script)
# weight positive scores by each class's recall, negative scores by the complement
aaa=mul_dui_ying(tensorSS_to_list(torch.tensor(tensorSS_to_list(yyyy_yuce))),rate)
bbb=mul_dui_ying(tensorSS_to_list(torch.tensor(tensorSS_to_list(yyyy_yuce_anti))),tensor_to_list((1-torch.tensor(rate))/(sum(geshu)-torch.tensor(geshu))))
gg=tensorSS_to_list(aaa+bbb)
gg  # (notebook cell echo; no effect when run as a script)
# final prediction per sample = class with the highest combined score
ee=[]
for v in gg:
    print(v.index(max(v)))
    ee.append(v.index(max(v)))
Y   # (notebook cell echo; no effect when run as a script)
ee  # (notebook cell echo; no effect when run as a script)
# overall accuracy against the true labels Y
wwwwwwww=0  # wrong count
rrrrrrrr=0  # right count
for i in range(len(Y)):
    if Y[i]==ee[i]:
        rrrrrrrr+=1
    else:
        wwwwwwww+=1
print(rrrrrrrr,rrrrrrrr+wwwwwwww,rrrrrrrr/(rrrrrrrr+wwwwwwww)*100,"%")
        

 

# (Trailing web-page boilerplate from the blog platform this file was scraped
#  from — likes/favorites counters, comment box, payment widget text — removed;
#  it was never part of the program.)