【1】利用dataloader函数完成批量训练
【2】代码
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Name:        batchtest
# Description: Mini-batch training demo with torch.utils.data.DataLoader.
# Author:      Administrator
# Date:        2020/11/29
# -----------------------------------------------------------------------------
import torch
import torch.utils.data as Data

torch.manual_seed(1)  # reproducible shuffling across runs

BATCH_SIZE = 5

if __name__ == '__main__':
    # The __main__ guard is required because num_workers > 0 spawns worker
    # processes that re-import this module (notably on Windows).
    x = torch.linspace(1, 10, 10)  # x data (torch tensor)
    y = torch.linspace(10, 1, 10)  # y data (torch tensor)

    # Pair x[i] with y[i] so the loader yields aligned (batch_x, batch_y).
    torch_dataset = Data.TensorDataset(x, y)
    loader = Data.DataLoader(
        dataset=torch_dataset,
        batch_size=BATCH_SIZE,
        shuffle=True,    # reshuffle the data every epoch
        num_workers=2,   # number of subprocesses used for data loading
    )

    for epoch in range(3):
        for step, (batch_x, batch_y) in enumerate(loader):
            # Show the contents of each mini-batch.
            print('Epoch: ', epoch, '| Step: ', step, '| batch x: ',
                  batch_x.numpy(), '| batch y: ', batch_y.numpy())
【3】报错问题
添加 if __name__ == '__main__': 的原因：(1) num_workers>0 时 DataLoader 会启动子进程重新导入主模块（Windows 上尤其如此），不加该保护会导致批训练报错（例如 "got an unexpected keyword argument ..." 之类的多进程启动错误）。
(2)github源码 https://github.com/MorvanZhou/PyTorch-Tutorial/blob/master/tutorial-contents/305_batch_train.py
【4】更改后的代码
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Name:        batchtest
# Description: Mini-batch training demo with torch.utils.data.DataLoader.
#              Single-process variant: num_workers is disabled, so no
#              __main__ guard is needed.
# Author:      Administrator
# Date:        2020/11/29
# -----------------------------------------------------------------------------
import torch
import torch.utils.data as Data

torch.manual_seed(1)  # reproducible shuffling across runs

BATCH_SIZE = 5

x = torch.linspace(1, 10, 10)  # x data (torch tensor)
y = torch.linspace(10, 1, 10)  # y data (torch tensor)

# Pair x[i] with y[i] so the loader yields aligned (batch_x, batch_y).
torch_dataset = Data.TensorDataset(x, y)
loader = Data.DataLoader(
    dataset=torch_dataset,
    batch_size=BATCH_SIZE,
    shuffle=True,      # reshuffle the data every epoch
    # num_workers=2,   # disabled: workers require the __main__ guard
)

for epoch in range(3):
    for step, (batch_x, batch_y) in enumerate(loader):
        # Show the contents of each mini-batch.
        print('Epoch: ', epoch, '| Step: ', step, '| batch x: ',
              batch_x.numpy(), '| batch y: ', batch_y.numpy())
【5】结果图