# A neural network can solve the XOR problem (demonstrating that neural
# networks can handle linearly non-separable problems).
import torch
from torch import nn, optim
from torch.nn import functional as F
class NLP(nn.Module):
    """A two-layer perceptron (MLP) classifier.

    Maps ``input_dim`` features through one ReLU hidden layer to
    ``num_class`` raw scores (logits).

    The logits are intended to be passed directly to
    ``nn.CrossEntropyLoss``, which applies log-softmax internally.
    The original code applied ``F.softmax`` here before handing the
    result to ``CrossEntropyLoss`` — a double softmax that flattens
    gradients and slows/degrades training — so the softmax is removed.
    ``argmax`` predictions are unchanged by this fix.
    """

    def __init__(self, input_dim, hidden_dim, num_class):
        super(NLP, self).__init__()
        self.linear1 = nn.Linear(input_dim, hidden_dim)
        self.activate = F.relu  # activation kept as an attribute, as before
        self.linear2 = nn.Linear(hidden_dim, num_class)

    def forward(self, inputs):
        """Return a ``(batch, num_class)`` tensor of logits for ``inputs``."""
        hidden = self.activate(self.linear1(inputs))
        logits = self.linear2(hidden)
        return logits
# Training data for XOR: the four binary input pairs and their labels.
x_train = torch.tensor([[0.0, 0.0], [0.0, 1.0], [1.0, 0.0], [1.0, 1.0]])
y_train = torch.tensor([0, 1, 1, 0])

model = NLP(input_dim=2, hidden_dim=5, num_class=2)
# CrossEntropyLoss expects raw logits; it applies log-softmax internally.
# (The original also assigned the bare class `nn.NLLLoss` to an unused
# variable — dead code, removed.)
criterion = nn.CrossEntropyLoss()
optimizer = optim.SGD(model.parameters(), lr=0.05)  # learning rate 0.05

for epoch in range(10000):  # number of training iterations
    y_pred = model(x_train)            # forward pass
    loss = criterion(y_pred, y_train)  # compute the loss
    optimizer.zero_grad()              # clear gradients so they don't accumulate
    loss.backward()                    # backpropagation
    optimizer.step()                   # update the parameters

print("Parameters:")
for name, param in model.named_parameters():  # parameter name and value
    print(name, param.data)

y_pred = model(x_train)
print(y_pred)
print("Predicted results:", y_pred.argmax(dim=1))