# PyTorch implementation of a multi-layer (fully-connected) network
import torch
import torch.nn as nn
import torch.nn.init as init
import torch.nn.functional as F
import math
#读取数据集
# Pima-Indians-Diabetes数据集
import pandas as pd
import numpy as np
# Load the Pima Indians Diabetes dataset (768 rows: 8 features + 1 binary label).
# NOTE(review): path is machine-specific — adjust for your environment.
xy = pd.read_csv('/home/infisa/wjht/project/pytorch_practice/diabetes.csv',
                 delimiter=',', dtype=np.float32)
xy_numpy = np.array(xy)  # DataFrame -> ndarray so it can be turned into tensors

x = xy_numpy[:, 0:-1]                # features, shape (768, 8)
y = xy_numpy[:, -1].reshape(-1, 1)   # labels reshaped to (768, 1) for BCELoss

# Convert numpy arrays to tensors. torch.from_numpy() already yields a
# float32 tensor here; the original torch.Tensor(torch.from_numpy(...))
# wrapper made a redundant extra copy.
x_data = torch.from_numpy(x)
y_data = torch.from_numpy(y)

# Sanity-check dimensions.
print(x_data.shape)  # torch.Size([768, 8])
print(y_data.shape)  # torch.Size([768, 1])
class Model(nn.Module):
    """Three-layer fully-connected binary classifier (8 -> 6 -> 4 -> 1).

    ReLU activations and p=0.5 dropout follow each hidden layer; a sigmoid
    on the output maps the logit to a probability suitable for BCELoss.
    """

    def __init__(self):
        super(Model, self).__init__()
        self.fc1 = torch.nn.Linear(8, 6)
        self.fc2 = torch.nn.Linear(6, 4)
        self.fc3 = torch.nn.Linear(4, 1)

    def forward(self, x):
        x = F.relu(self.fc1(x))  # 8 -> 6, first hidden layer
        # Bug fix: F.dropout defaults to training=True, so dropout was
        # applied even in eval mode. Tie it to the module's training flag
        # so model.eval() disables it.
        x = F.dropout(x, p=0.5, training=self.training)
        x = F.relu(self.fc2(x))  # 6 -> 4, second hidden layer
        x = F.dropout(x, p=0.5, training=self.training)
        # 4 -> 1, sigmoid squashes the logit into (0, 1)
        y_pred = torch.sigmoid(self.fc3(x))
        return y_pred
def weight_init(m):
    """Reinitialize Linear layers with standard-normal weights and biases.

    Intended for ``model.apply(weight_init)``, which invokes this on every
    submodule; non-Linear modules are left untouched.
    """
    classname = m.__class__.__name__
    if classname.find('Linear') != -1:
        m.weight.data = torch.randn(m.weight.data.size())
        # Bug fix: the original wrote torch.randn(m.bias.size())[0], which
        # indexed the random tensor and collapsed the bias to a 0-dim
        # scalar, corrupting the layer. Use the full bias shape instead.
        m.bias.data = torch.randn(m.bias.data.size())
# Build the network and randomize its Linear-layer parameters.
model = Model()
model.apply(weight_init)

# Binary cross-entropy loss, optimized with plain SGD (learning rate 0.01).
criterion = torch.nn.BCELoss()
optimizer = torch.optim.SGD(model.parameters(), lr=0.01)

Loss = []  # per-epoch loss history
print(x.shape)

for epoch in range(2000):
    # Full-batch forward pass over the whole dataset.
    y_pred = model(x_data)
    loss = criterion(y_pred, y_data)
    Loss.append(loss.item())

    # Report progress every 100 epochs.
    if epoch % 100 == 0:
        print('[%d, %5d] loss: %.3f' % (epoch + 1, 2000, loss.item()))

    optimizer.zero_grad()  # clear gradients accumulated by the last step
    loss.backward()        # backpropagate
    optimizer.step()       # apply the parameter update
# y_pred holds probabilities; convert them to hard 0/1 labels matching
# y_data's encoding (sigmoid output > 0.5 -> 1, otherwise 0) and report
# training-set accuracy.
# Fix: the original per-element Python loop mutated y_pred in place — an
# in-place write to a tensor still attached to the autograd graph — and was
# needlessly slow. A single vectorized comparison under no_grad does the job.
with torch.no_grad():
    predicted = (y_pred > 0.5).float()
    print((predicted == y_data).sum().item() / len(y_data))
# Sample output (final training-set accuracy): 0.6510416666666666