PyTorch Lecture 06: Logistic Regression

Memorize the common patterns (use them often enough and they will stick).

import torch

x_data = torch.tensor([[1.0], [2.0], [3.0], [4.0]])
y_data = torch.tensor([[0.0], [0.0], [1.0], [1.0]])


# Step 1: design the model class
class Model(torch.nn.Module):
    def __init__(self):
        super(Model, self).__init__()
        self.linear = torch.nn.Linear(1, 1)  # one in and one out

    def forward(self, x):
        y_pred = torch.sigmoid(self.linear(x))  # squash the linear output into (0, 1)
        return y_pred


# our model
model = Model()
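
Mathematically, the model pushes a single linear unit through a sigmoid, producing a probability in (0, 1); the input is measured in hours, per the prediction printouts at the end:

$$\hat{y} = \sigma(wx + b), \qquad \sigma(z) = \frac{1}{1 + e^{-z}}$$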

# Step 2: construct the loss function and optimizer. The call to
# model.parameters() in the SGD constructor collects the learnable
# parameters of the single nn.Linear module that is a member of the model.
criterion = torch.nn.BCELoss(reduction='mean')
optimizer = torch.optim.SGD(model.parameters(), lr=0.01)
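
BCELoss with mean reduction is the binary cross-entropy averaged over the N training samples:

$$\text{loss} = -\frac{1}{N}\sum_{i=1}^{N}\left[y_i \log \hat{y}_i + (1 - y_i)\log(1 - \hat{y}_i)\right]$$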

# Step 3: training loop

for epoch in range(1000):
    # Forward pass: Compute predicted y by passing x to the model
    y_pred = model(x_data)

    # Compute and print the loss
    loss = criterion(y_pred, y_data)
    print(epoch, loss.item())

    # Zero the gradients, perform a backward pass, and update the weights.
    optimizer.zero_grad()
    loss.backward()
    optimizer.step()
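
Each optimizer.step() applies plain gradient descent, $w \leftarrow w - lr \cdot \partial \text{loss} / \partial w$, to every parameter. Once the loop finishes, the learned weight and bias of the single linear layer can be read back; a minimal sketch (the names w and b are just for illustration):

w = model.linear.weight.item()  # one-element tensor, since the layer is 1-in, 1-out
b = model.linear.bias.item()
print(f"learned w = {w:.3f}, b = {b:.3f}")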

# After training
hour_var = torch.tensor([[1.0]])
print("predict 1 hour:", 1.0, model(hour_var).item() > 0.5)
hour_var = torch.tensor([[7.0]])
print("predict 7 hours:", 7.0, model(hour_var).item() > 0.5)
