Open akhilsadam opened 3 years ago
Same problem
Check the solution from https://github.com/vincentherrmann/pytorch-wavenet/issues/37
This issue is specifically for those who have tried (and failed with) the solution in #37.
Commenting out num_workers in wavenet_training.py did not help, nor did setting num_workers=0. Torch 1.6.0, Python 3.
Same problem.
On macOS, changing the DataLoader init to the following worked for me: self.dataloader = torch.utils.data.DataLoader(self.dataset, batch_size=batch_size, shuffle=True, num_workers=8, pin_memory=False, multiprocessing_context="fork")
WaveNet_demo.ipynb - trainer.train(batch_size=16,epochs=10):
Commenting out num_workers in wavenet_training.py did not help, nor did setting num_workers=0. torch 1.6.0, Python 3.
Thank you for the help!
~\Desktop\RUST\AudioSynth\pytorch-wavenet-master\wavenet_training.py in train(self, batch_size, epochs, continue_training_at_step) 62 63 if step % self.snapshot_interval == 0: ---> 64 if self.snapshot_path is None: 65 continue 66 timestring = time.strftime("%Y-%m-%d%H-%M-%S", time.gmtime())
~\AppData\Roaming\Python\Python38\site-packages\torch\utils\data\dataloader.py in iter(self) 289 return _SingleProcessDataLoaderIter(self) 290 else: --> 291 return _MultiProcessingDataLoaderIter(self) 292 293 @property
~\AppData\Roaming\Python\Python38\site-packages\torch\utils\data\dataloader.py in init(self, loader) 735 # before it starts, and del tries to join but will get: 736 # AssertionError: can only join a started process. --> 737 w.start() 738 self._index_queues.append(index_queue) 739 self._workers.append(w)
C:\Program Files\Python3.8\lib\multiprocessing\process.py in start(self) 119 'daemonic processes are not allowed to have children' 120 _cleanup() --> 121 self._popen = self._Popen(self) 122 self._sentinel = self._popen.sentinel 123 # Avoid a refcycle if the target function holds an indirect
C:\Program Files\Python3.8\lib\multiprocessing\context.py in _Popen(process_obj) 222 @staticmethod 223 def _Popen(process_obj): --> 224 return _default_context.get_context().Process._Popen(process_obj) 225 226 class DefaultContext(BaseContext):
C:\Program Files\Python3.8\lib\multiprocessing\context.py in _Popen(process_obj) 324 def _Popen(process_obj): 325 from .popen_spawn_win32 import Popen --> 326 return Popen(process_obj) 327 328 class SpawnContext(BaseContext):
C:\Program Files\Python3.8\lib\multiprocessing\popen_spawn_win32.py in init(self, process_obj) 91 try: 92 reduction.dump(prep_data, to_child) ---> 93 reduction.dump(process_obj, to_child) 94 finally: 95 set_spawning_popen(None)
C:\Program Files\Python3.8\lib\multiprocessing\reduction.py in dump(obj, file, protocol) 58 def dump(obj, file, protocol=None): 59 '''Replacement for pickle.dump() using ForkingPickler.''' ---> 60 ForkingPickler(file, protocol).dump(obj) 61 62 #
TypeError: cannot pickle '_io.BufferedReader' object