I am a college student interested in your work, and I am currently attempting to use your published model for my course project. However, while trying to run playground.ipynb (I also tried it from the command line and got the same error), I encountered the error below.
Please check it and help me solve this problem. Thank you very much!
File ~/anaconda3/envs/CFLD/lib/python3.10/site-packages/torch/nn/modules/module.py:1541, in Module._call_impl(self, *args, **kwargs)
1536 # If we don't have any hooks, we want to skip the rest of the logic in
1537 # this function, and just call forward.
1538 if not (self._backward_hooks or self._backward_pre_hooks or self._forward_hooks or self._forward_pre_hooks
1539 or _global_backward_pre_hooks or _global_backward_hooks
1540 or _global_forward_hooks or _global_forward_pre_hooks):
-> 1541 return forward_call(*args, **kwargs)
1543 try:
1544 result = None
File ~/anaconda3/envs/CFLD/lib/python3.10/site-packages/torch/nn/modules/module.py:1541, in Module._call_impl(self, *args, **kwargs)
1536 # If we don't have any hooks, we want to skip the rest of the logic in
1537 # this function, and just call forward.
1538 if not (self._backward_hooks or self._backward_pre_hooks or self._forward_hooks or self._forward_pre_hooks
1539 or _global_backward_pre_hooks or _global_backward_hooks
1540 or _global_forward_hooks or _global_forward_pre_hooks):
-> 1541 return forward_call(*args, **kwargs)
1543 try:
1544 result = None
File /home/dlpj/CFLD/models/unet.py:1684, in ResidualUNet2DConditionModel.forward(self, sample, timestep, encoder_hidden_states, class_labels, timestep_cond, attention_mask, cross_attention_kwargs, added_cond_kwargs, down_block_additional_residuals, mid_block_additional_residual, up_block_additional_residuals, encoder_attention_mask, return_dict)
1681 encoder_attention_mask = encoder_attention_mask.unsqueeze(1)
1683 # 0. center input if necessary
-> 1684 if self.config.center_input_sample:
1685 sample = 2 * sample - 1.0
1687 # 1. time
AttributeError: 'FrozenDict' object has no attribute 'center_input_sample'
Hello!
I am a college student interested in your work, and I am currently attempting to use your published model for my course project. However, while trying to run playground.ipynb (I also tried it from the command line and got the same error), I encountered the error below.
Please check it and help me solve this problem. Thank you very much!
AttributeError Traceback (most recent call last) Cell In[10], line 18 16 inputs = torch.cat([noisy_latents, noisy_latents, noisy_latents], dim=0) 17 inputs = noise_scheduler.scale_model_input(inputs, timestep=t) ---> 18 noise_pred = unet(sample=inputs, timestep=t, encoder_hidden_states=c_new, 19 down_block_additional_residuals=copy.deepcopy(down_block_additional_residuals), 20 up_block_additional_residuals=copy.deepcopy(up_block_additional_residuals)) 22 noise_pred_uc, noise_pred_down, noise_pred_full = noise_pred.chunk(3) 23 noise_pred = noise_pred_uc + \ 24 cfg.TEST.DOWN_BLOCK_GUIDANCE_SCALE * (noise_pred_down - noise_pred_uc) + \ 25 cfg.TEST.FULL_GUIDANCE_SCALE * (noise_pred_full - noise_pred_down)
File ~/anaconda3/envs/CFLD/lib/python3.10/site-packages/torch/nn/modules/module.py:1532, in Module._wrapped_call_impl(self, *args, **kwargs) 1530 return self._compiled_call_impl(*args, **kwargs) # type: ignore[misc] 1531 else: -> 1532 return self._call_impl(*args, **kwargs)
File ~/anaconda3/envs/CFLD/lib/python3.10/site-packages/torch/nn/modules/module.py:1541, in Module._call_impl(self, *args, **kwargs) 1536 # If we don't have any hooks, we want to skip the rest of the logic in 1537 # this function, and just call forward. 1538 if not (self._backward_hooks or self._backward_pre_hooks or self._forward_hooks or self._forward_pre_hooks 1539 or _global_backward_pre_hooks or _global_backward_hooks 1540 or _global_forward_hooks or _global_forward_pre_hooks): -> 1541 return forward_call(*args, **kwargs) 1543 try: 1544 result = None
File /home/dlpj/CFLD/models/unet.py:1946, in UNet.forward(self, sample, timestep, **kwargs) 1945 def forward(self, sample, timestep, **kwargs): -> 1946 return self.model(sample, timestep, **kwargs).sample
File ~/anaconda3/envs/CFLD/lib/python3.10/site-packages/torch/nn/modules/module.py:1532, in Module._wrapped_call_impl(self, *args, **kwargs) 1530 return self._compiled_call_impl(*args, **kwargs) # type: ignore[misc] 1531 else: -> 1532 return self._call_impl(*args, **kwargs)
File ~/anaconda3/envs/CFLD/lib/python3.10/site-packages/torch/nn/modules/module.py:1541, in Module._call_impl(self, *args, **kwargs) 1536 # If we don't have any hooks, we want to skip the rest of the logic in 1537 # this function, and just call forward. 1538 if not (self._backward_hooks or self._backward_pre_hooks or self._forward_hooks or self._forward_pre_hooks 1539 or _global_backward_pre_hooks or _global_backward_hooks 1540 or _global_forward_hooks or _global_forward_pre_hooks): -> 1541 return forward_call(*args, **kwargs) 1543 try: 1544 result = None
File /home/dlpj/CFLD/models/unet.py:1684, in ResidualUNet2DConditionModel.forward(self, sample, timestep, encoder_hidden_states, class_labels, timestep_cond, attention_mask, cross_attention_kwargs, added_cond_kwargs, down_block_additional_residuals, mid_block_additional_residual, up_block_additional_residuals, encoder_attention_mask, return_dict) 1681 encoder_attention_mask = encoder_attention_mask.unsqueeze(1) 1683 # 0. center input if necessary -> 1684 if self.config.center_input_sample: 1685 sample = 2 * sample - 1.0 1687 # 1. time
AttributeError: 'FrozenDict' object has no attribute 'center_input_sample'