500个数据,20个特征,标签为3分类。
网络架构:第一层13个神经元,第二层8个,第三层输出。
第一层激活函数relu,第二层sigmoid,输出层softmax。
import torch
import torch.nn as nn
from torch.nn import functional as F
# Fix the RNG so the synthetic dataset is reproducible across runs.
torch.random.manual_seed(22)
# 500 samples with 20 features each.
x = torch.rand((500, 20), dtype=torch.float32)
# 3-class labels in {0, 1, 2}. Class labels must be an integer dtype (long):
# PyTorch classification losses such as CrossEntropyLoss reject float targets.
# NOTE(review): target shape is (500, 1) — CrossEntropyLoss expects a flat (500,)
# target; confirm/squeeze in the training loop.
y = torch.randint(low=0, high=3, size=(500, 1), dtype=torch.long)
class Model(nn.Module):
    """Three-layer fully connected classifier.

    Architecture: Linear(in, 13) -> ReLU -> Linear(13, 8) -> Sigmoid
    -> Linear(8, out) -> Softmax, so the forward pass yields class
    probabilities that sum to 1 per sample.
    """

    def __init__(self, in_features=10, out_features=2):
        """
        :param in_features: number of input features per sample
        :param out_features: number of output classes
        """
        # Run nn.Module's __init__ so submodule/parameter registration works.
        super().__init__()
        self.linear1 = nn.Linear(in_features, 13)
        self.linear2 = nn.Linear(13, 8)
        self.output = nn.Linear(8, out_features)

    def forward(self, x):
        """Forward propagation: each layer followed by its activation.

        :param x: input batch of shape (batch, in_features)
        :return: per-class probabilities of shape (batch, out_features)
        """
        hidden1 = F.relu(self.linear1(x))
        hidden2 = torch.sigmoid(self.linear2(hidden1))
        logits = self.output(hidden2)
        return F.softmax(logits, dim=1)
if __name__ == '__main__':
    # Re-seed so weight initialization is reproducible, then size the
    # network from the data: one input per feature, one output per class.
    torch.random.manual_seed(22)
    net = Model(in_features=x.shape[1], out_features=y.unique().numel())
    print(net.forward(x))
    # Show the weight matrix shape of each layer in order.
    for layer in (net.linear1, net.linear2, net.output):
        print(layer.weight.shape)
# 关于super:Model是nn.Module的子类,会继承父类的方法,但父类__init__里创建的实例属性
# 只有在调用父类__init__时才会被设置,所以子类__init__中必须先调用super().__init__(),
# 否则nn.Module的参数/子模块注册机制不会生效。
# net.apply(fn)      # Module 的方法:对网络中所有子模块(__init__里注册的每一层)递归执行 fn
# net.parameters()   # Module 的方法:返回网络全部可学习参数的迭代器