运行 pytorch_geometric-master 的 examples 目录下的程序 dgcnn_segmentation.py 时出现如下错误。从 traceback 看,问题应该出在这段代码 `collate_fn=lambda batch: collate(batch)`:Windows 下 DataLoader 以 spawn 方式启动多进程 worker 时需要 pickle 该 lambda,而局部 lambda 无法被 pickle。目前没有找到解决方法,请问该如何解决?
super(DataLoader,
self).__init__(dataset, batch_size, shuffle,
collate_fn=lambda batch: collate(batch), **kwargs)
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
<ipython-input-7-1c7566fb5b62> in <module>
86
87 for epoch in range(1, 31):
---> 88 train()
89 acc, iou = test(test_loader)
90 print('Epoch: {:02d}, Acc: {:.4f}, IoU: {:.4f}'.format(epoch, acc, iou))
<ipython-input-7-1c7566fb5b62> in train()
32
33 total_loss = correct_nodes = total_nodes = 0
---> 34 for i, data in enumerate(train_loader):
35 data = data.to(device)
36 optimizer.zero_grad()
D:\Anaconda3\lib\site-packages\torch\utils\data\dataloader.py in __iter__(self)
276 return _SingleProcessDataLoaderIter(self)
277 else:
--> 278 return _MultiProcessingDataLoaderIter(self)
279
280 @property
D:\Anaconda3\lib\site-packages\torch\utils\data\dataloader.py in __init__(self, loader)
680 # before it starts, and __del__ tries to join but will get:
681 # AssertionError: can only join a started process.
--> 682 w.start()
683 self._index_queues.append(index_queue)
684 self._workers.append(w)
D:\Anaconda3\lib\multiprocessing\process.py in start(self)
110 'daemonic processes are not allowed to have children'
111 _cleanup()
--> 112 self._popen = self._Popen(self)
113 self._sentinel = self._popen.sentinel
114 # Avoid a refcycle if the target function holds an indirect
D:\Anaconda3\lib\multiprocessing\context.py in _Popen(process_obj)
221 @staticmethod
222 def _Popen(process_obj):
--> 223 return _default_context.get_context().Process._Popen(process_obj)
224
225 class DefaultContext(BaseContext):
D:\Anaconda3\lib\multiprocessing\context.py in _Popen(process_obj)
320 def _Popen(process_obj):
321 from .popen_spawn_win32 import Popen
--> 322 return Popen(process_obj)
323
324 class SpawnContext(BaseContext):
D:\Anaconda3\lib\multiprocessing\popen_spawn_win32.py in __init__(self, process_obj)
87 try:
88 reduction.dump(prep_data, to_child)
---> 89 reduction.dump(process_obj, to_child)
90 finally:
91 set_spawning_popen(None)
D:\Anaconda3\lib\multiprocessing\reduction.py in dump(obj, file, protocol)
58 def dump(obj, file, protocol=None):
59 '''Replacement for pickle.dump() using ForkingPickler.'''
---> 60 ForkingPickler(file, protocol).dump(obj)
61
62 #
AttributeError: Can't pickle local object 'DataLoader.__init__.<locals>.<lambda>'