# Logistic regression: sigmoid activation implementing an AND gate.
import torch

# Inputs with a leading bias column of 1s: each row is (bias, x1, x2).
X = torch.tensor([[1, 0, 0], [1, 1, 0], [1, 0, 1], [1, 1, 1]], dtype=torch.float32)
# Ground-truth AND outputs for each input row.
andgate = torch.tensor([[0], [0], [0], [1]], dtype=torch.float32)
# Weights: b, w1, w2.
w = torch.tensor([-0.2, 0.15, 0.15], dtype=torch.float32)


def LogisticR(X, w):
    """Return (sigma, andhat): sigmoid activations and 0/1 predictions.

    sigma is sigmoid(Xw); andhat thresholds sigma at 0.5.
    """
    zhat = torch.mv(X, w)            # linear score z = Xw (bias folded into X)
    sigma = torch.sigmoid(zhat)      # equivalent to 1 / (1 + exp(-z))
    # Threshold at 0.5; int(True) = 1, int(False) = 0.
    andhat = torch.tensor([int(v) for v in sigma >= 0.5], dtype=torch.float32)
    # BUG FIX: the original returned `andgate` (the ground-truth labels)
    # instead of the predictions `andhat`, even though the caller unpacks
    # the second value as `andhat`.
    return sigma, andhat


sigma, andhat = LogisticR(X, w)
print(sigma)
print(andgate)
print(andhat)
# Step (sign) function implementing an AND gate.
import torch

# Inputs with a leading bias column of 1s: each row is (bias, x1, x2).
X = torch.tensor([[1, 0, 0], [1, 1, 0], [1, 0, 1], [1, 1, 1]], dtype=torch.float32)
# Ground-truth AND outputs for each input row.
andgate = torch.tensor([[0], [0], [0], [1]], dtype=torch.float32)
# Weights: b, w1, w2.
w = torch.tensor([-0.2, 0.15, 0.15], dtype=torch.float32)


def LinearRwithsign(X, w):
    """Return (zhat, andhat): raw linear scores and 0/1 step predictions.

    andhat is the step function of zhat: 1 where z >= 0, else 0.
    """
    zhat = torch.mv(X, w)  # linear score z = Xw (bias folded into X)
    # Step function at 0; int(True) = 1, int(False) = 0.
    andhat = torch.tensor([int(v) for v in zhat >= 0], dtype=torch.float32)
    # BUG FIX: the original returned `andgate` (labels) instead of the
    # predictions, and the caller's unpack then clobbered the label tensor
    # while printing an `andhat` the function never produced.
    return zhat, andhat


zhat, andhat = LinearRwithsign(X, w)
print(andgate)
print(andhat)
# Single nn.Linear layer + sigmoid producing binary predictions.
import torch
from torch.nn import functional as F

X = torch.tensor([[0, 0], [1, 0], [0, 1], [1, 1]], dtype=torch.float32)
torch.random.manual_seed(420)     # fix the Linear layer's random init
dense = torch.nn.Linear(2, 1)     # learnable z = Xw + b
zhat = dense(X)
# FIX: the original `F.torch.sigmoid` only worked by accident (it reaches
# the torch module through an attribute of torch.nn.functional); call
# torch.sigmoid directly.
sigma = torch.sigmoid(zhat)
y = [int(v) for v in sigma >= 0.5]
# Sigmoid AND-gate variant with a different seed.
import torch
from torch.nn import functional as F

X = torch.tensor([[0, 0], [1, 0], [0, 1], [1, 1]], dtype=torch.float32)
# Expected AND labels (unused, kept from the original comment):
# andgate = torch.tensor([[0], [0], [0], [1]])
torch.random.manual_seed(200)     # fix the Linear layer's random init
dense = torch.nn.Linear(2, 1)     # learnable z = Xw + b
zhat = dense(X)
# FIX: the original `F.torch.sigmoid` only worked by accident (it reaches
# the torch module through an attribute of torch.nn.functional); call
# torch.sigmoid directly. The strict `> 0.5` threshold is preserved.
sigma = torch.sigmoid(zhat)
y = [int(v) for v in sigma > 0.5]
# Softmax over a 3-class linear layer output.
import torch
from torch.nn import functional as F

X = torch.tensor([[0, 0], [1, 0], [0, 1], [1, 1]], dtype=torch.float32)
torch.random.manual_seed(420)     # fix the Linear layer's random init
dense = torch.nn.Linear(2, 3)     # 3 output classes per sample
zhat = dense(X)
print(zhat)
# dim=1: normalize across the class dimension so each row sums to 1.
sigma = F.softmax(zhat, dim=1)
print(sigma)