Closed BakerBunker closed 1 year ago
```
File "audiolm_pytorch/trainer.py", line 261, in save
    258 │         pkg = dict(
    259 │             model = self.accelerator.get_state_dict(self.soundstream),
    260 │             optim = self.optim.state_dict(),
  ❱ 261 │             config = self.soundstream._configs,
    262 │             discr_optim = self.discr_optim.state_dict(),
    263 │             version = __version__
    264 │         )

File "torch/nn/modules/module.py", line 1265, in __getattr__
    1262 │         modules = self.__dict__['_modules']
    1263 │         if name in modules:
    1264 │             return modules[name]
  ❱ 1265 │     raise AttributeError("'{}' object has no attribute '{}'".format(
    1266 │         type(self).__name__, name))
    1267 │
    1268 │ def __setattr__(self, name: str, value: Union[Tensor, 'Module']) -> None:

AttributeError: 'DistributedDataParallel' object has no attribute '_configs'
```
@BakerBunker oh oops, fixed here!