Note on the `num_workers` setting of PyTorch's DataLoader:
If `num_workers` is greater than 0, the code that iterates the loader must be placed under an `if __name__ == "__main__"` guard (e.g. inside a `main()` function); otherwise the script raises an error, because the worker processes re-import the module and would re-execute the top-level iteration code:
import numpy as np
import torch
from torch.autograd import Variable
import torch.nn.functional
import matplotlib.pyplot as plt
import torch.utils.data as Data
# Toy dataset: x runs 1..10 while y runs 10..1 (10 samples total).
BATCH_SIZE = 5

x = torch.linspace(1, 10, 10)  # inputs
y = torch.linspace(10, 1, 10)  # targets

# Wrap the two tensors so each item is an (x[i], y[i]) pair.
torch_dataset = Data.TensorDataset(x, y)

# num_workers > 0 spawns worker processes; that is why iteration over this
# loader must happen under the `if __name__ == "__main__"` guard below.
loader = Data.DataLoader(
    torch_dataset,
    batch_size=BATCH_SIZE,
    shuffle=True,
    num_workers=2,
)
def main():
    """Drain the DataLoader for three epochs, printing every batch.

    This must run only under the ``__main__`` guard: the loader uses
    ``num_workers=2``, and its worker processes re-import this module
    on start-up.
    """
    for epoch in range(3):
        for step, (batch_x, batch_y) in enumerate(loader):
            # training....
            xs = batch_x.numpy()
            ys = batch_y.numpy()
            print('Epoch:', epoch, '| step:', step,
                  '| batch x:', xs, '| batch y:', ys)


if __name__ == "__main__":
    main()
'''
# Running the loop directly at module level like this raises an error,
# because the DataLoader worker processes re-import the module and would
# re-execute this top-level code:
for epoch in range(3):
    for step,(batch_x,batch_y) in enumerate(loader):
        # training....
        print('Epoch:',epoch,'| step:',step,'| batch x:',batch_x.numpy(),
            '| batch y:',batch_y.numpy())
'''