IrisRainbowNeko / HCP-Diffusion

A universal Stable-Diffusion toolbox
Apache License 2.0

Training LoRA encounters a tensor error #45

Closed ritsuyan closed 11 months ago

ritsuyan commented 11 months ago

I followed this article and got the following error.

Error message:

  File "/root/miniconda3/lib/python3.10/site-packages/torch/_subclasses/fake_tensor.py", line 1212, in validate
    raise Exception(
Exception: Please convert all Tensors to FakeTensors first or instantiate FakeTensorMode with 'allow_non_fake_inputs'.
Found in aten.clone.default(*(tensor([255, 255, 255, ..., 82, 191, 1, 0, 0, 0, 0, 0, 72, 3, 0, 0, 0, 0, 0, 0], dtype=torch.uint8),), **{'memory_format': torch.contiguous_format})

The above exception was the direct cause of the following exception:

Traceback (most recent call last):
  File "/root/miniconda3/lib/python3.10/site-packages/torch/_dynamo/convert_frame.py", line 324, in _compile
    out_code = transform_code_object(code, transform)
  File "/root/miniconda3/lib/python3.10/site-packages/torch/_dynamo/bytecode_transformation.py", line 445, in transform_code_object
    transformations(instructions, code_options)
  File "/root/miniconda3/lib/python3.10/site-packages/torch/_dynamo/convert_frame.py", line 311, in transform
    tracer.run()
  File "/root/miniconda3/lib/python3.10/site-packages/torch/_dynamo/symbolic_convert.py", line 1726, in run
    super().run()
  File "/root/miniconda3/lib/python3.10/site-packages/torch/_dynamo/symbolic_convert.py", line 576, in run
    and self.step()
  File "/root/miniconda3/lib/python3.10/site-packages/torch/_dynamo/symbolic_convert.py", line 540, in step
    getattr(self, inst.opname)(inst)
  File "/root/miniconda3/lib/python3.10/site-packages/torch/_dynamo/symbolic_convert.py", line 1792, in RETURN_VALUE
    self.output.compile_subgraph(
  File "/root/miniconda3/lib/python3.10/site-packages/torch/_dynamo/output_graph.py", line 517, in compile_subgraph
    self.compile_and_call_fx_graph(tx, list(reversed(stack_values)), root)
  File "/root/miniconda3/lib/python3.10/site-packages/torch/_dynamo/output_graph.py", line 588, in compile_and_call_fx_graph
    compiled_fn = self.call_user_compiler(gm)
  File "/root/miniconda3/lib/python3.10/site-packages/torch/_dynamo/utils.py", line 163, in time_wrapper
    r = func(*args, **kwargs)
  File "/root/miniconda3/lib/python3.10/site-packages/torch/_dynamo/output_graph.py", line 675, in call_user_compiler
    raise BackendCompilerFailed(self.compiler_fn, e) from e
torch._dynamo.exc.BackendCompilerFailed: debug_wrapper raised Exception: Please convert all Tensors to FakeTensors first or instantiate FakeTensorMode with 'allow_non_fake_inputs'.
Found in aten.clone.default(*(tensor([255, 255, 255, ..., 82, 191, 1, 0, 0, 0, 0, 0, 72, 3, 0, 0, 0, 0, 0, 0], dtype=torch.uint8),), **{'memory_format': torch.contiguous_format})

[2023-11-09 16:43:07,786] torch._dynamo.convert_frame: [ERROR] WON'T CONVERT custom_forward /root/miniconda3/lib/python3.10/site-packages/diffusers/models/unet_2d_blocks.py line 2203

2204           0 LOAD_DEREF               1 (return_dict)
               2 LOAD_CONST               0 (None)
               4 IS_OP                    1
               6 POP_JUMP_IF_FALSE       11 (to 22)

2205           8 LOAD_DEREF               0 (module)
              10 LOAD_FAST                0 (inputs)
              12 LOAD_CONST               1 ('return_dict')
              14 LOAD_DEREF               1 (return_dict)
              16 BUILD_MAP                1
              18 CALL_FUNCTION_EX         1
              20 RETURN_VALUE

2207     >>   22 LOAD_DEREF               0 (module)
              24 LOAD_FAST                0 (inputs)
              26 CALL_FUNCTION_EX         0
              28 RETURN_VALUE

========== TorchDynamo Stack Trace ==========
Traceback (most recent call last):
  File "/root/miniconda3/lib/python3.10/site-packages/torch/random.py", line 133, in fork_rng
    yield
  File "/root/miniconda3/lib/python3.10/site-packages/torch/utils/checkpoint.py", line 414, in unpack
    set_device_states(fwd_gpu_devices, fwd_gpu_states)
  File "/root/miniconda3/lib/python3.10/site-packages/torch/utils/checkpoint.py", line 58, in set_device_states
    torch.cuda.set_rng_state(state)
  File "/root/miniconda3/lib/python3.10/site-packages/torch/cuda/random.py", line 51, in set_rng_state
    new_state_copy = new_state.clone(memory_format=torch.contiguous_format)
  File "/root/miniconda3/lib/python3.10/site-packages/torch/utils/_stats.py", line 20, in wrapper
    return fn(*args, **kwargs)
  File "/root/miniconda3/lib/python3.10/site-packages/torch/fx/experimental/proxy_tensor.py", line 487, in __torch_dispatch__
    return self.inner_torch_dispatch(func, types, args, kwargs)
  File "/root/miniconda3/lib/python3.10/site-packages/torch/fx/experimental/proxy_tensor.py", line 512, in inner_torch_dispatch
    out = proxy_call(self, func, args, kwargs)
  File "/root/miniconda3/lib/python3.10/site-packages/torch/fx/experimental/proxy_tensor.py", line 345, in proxy_call
    out = func(*args, **kwargs)
  File "/root/miniconda3/lib/python3.10/site-packages/torch/_ops.py", line 287, in __call__
    return self._op(*args, **kwargs or {})
  File "/root/miniconda3/lib/python3.10/site-packages/torch/utils/_stats.py", line 20, in wrapper
    return fn(*args, **kwargs)
  File "/root/miniconda3/lib/python3.10/site-packages/torch/_subclasses/fake_tensor.py", line 987, in __torch_dispatch__
    return self.dispatch(func, types, args, kwargs)
  File "/root/miniconda3/lib/python3.10/site-packages/torch/_subclasses/fake_tensor.py", line 1066, in dispatch
    args, kwargs = self.validate_and_convert_non_fake_tensors(
  File "/root/miniconda3/lib/python3.10/site-packages/torch/_subclasses/fake_tensor.py", line 1220, in validate_and_convert_non_fake_tensors
    return tree_map_only(
  File "/root/miniconda3/lib/python3.10/site-packages/torch/utils/_pytree.py", line 266, in tree_map_only
    return tree_map(map_only(ty)(fn), pytree)
  File "/root/miniconda3/lib/python3.10/site-packages/torch/utils/_pytree.py", line 196, in tree_map
    return tree_unflatten([fn(i) for i in flat_args], spec)
  File "/root/miniconda3/lib/python3.10/site-packages/torch/utils/_pytree.py", line 196, in <listcomp>
    return tree_unflatten([fn(i) for i in flat_args], spec)
  File "/root/miniconda3/lib/python3.10/site-packages/torch/utils/_pytree.py", line 247, in inner
    return f(x)
  File "/root/miniconda3/lib/python3.10/site-packages/torch/_subclasses/fake_tensor.py", line 1212, in validate
    raise Exception(
Exception: Please convert all Tensors to FakeTensors first or instantiate FakeTensorMode with 'allow_non_fake_inputs'.
Found in aten.clone.default(*(tensor([255, 255, 255, ..., 82, 191, 1, 0, 0, 0, 0, 0, 72, 3, 0, 0, 0, 0, 0, 0], dtype=torch.uint8),), **{'memory_format': torch.contiguous_format})

During handling of the above exception, another exception occurred:

Traceback (most recent call last):
  File "/root/miniconda3/lib/python3.10/site-packages/torch/_dynamo/output_graph.py", line 670, in call_user_compiler
    compiled_fn = compiler_fn(gm, self.fake_example_inputs())
  File "/root/miniconda3/lib/python3.10/site-packages/torch/_dynamo/debug_utils.py", line 1055, in debug_wrapper
    compiled_gm = compiler_fn(gm, example_inputs)
  File "/root/miniconda3/lib/python3.10/site-packages/torch/__init__.py", line 1390, in __call__
    return compile_fx(model_, inputs_, config_patches=self.config)
  File "/root/miniconda3/lib/python3.10/site-packages/torch/_inductor/compile_fx.py", line 455, in compile_fx
    return aot_autograd(
  File "/root/miniconda3/lib/python3.10/site-packages/torch/_dynamo/backends/common.py", line 48, in compiler_fn
    cg = aot_module_simplified(gm, example_inputs, **kwargs)
  File "/root/miniconda3/lib/python3.10/site-packages/torch/_functorch/aot_autograd.py", line 2822, in aot_module_simplified
    compiled_fn = create_aot_dispatcher_function(
  File "/root/miniconda3/lib/python3.10/site-packages/torch/_dynamo/utils.py", line 163, in time_wrapper
    r = func(*args, **kwargs)
  File "/root/miniconda3/lib/python3.10/site-packages/torch/_functorch/aot_autograd.py", line 2515, in create_aot_dispatcher_function
    compiled_fn = compiler_fn(flat_fn, fake_flat_args, aot_config)
  File "/root/miniconda3/lib/python3.10/site-packages/torch/_functorch/aot_autograd.py", line 1715, in aot_wrapper_dedupe
    return compiler_fn(flat_fn, leaf_flat_args, aot_config)
  File "/root/miniconda3/lib/python3.10/site-packages/torch/_functorch/aot_autograd.py", line 2104, in aot_dispatch_autograd
    fx_g = make_fx(joint_forward_backward, aot_config.decompositions)(
  File "/root/miniconda3/lib/python3.10/site-packages/torch/fx/experimental/proxy_tensor.py", line 714, in wrapped
    t = dispatch_trace(wrap_key(func, args, fx_tracer), tracer=fx_tracer, concrete_args=tuple(phs))
  File "/root/miniconda3/lib/python3.10/site-packages/torch/_dynamo/eval_frame.py", line 209, in _fn
    return fn(*args, **kwargs)
  File "/root/miniconda3/lib/python3.10/site-packages/torch/fx/experimental/proxy_tensor.py", line 443, in dispatch_trace
    graph = tracer.trace(root, concrete_args)
  File "/root/miniconda3/lib/python3.10/site-packages/torch/_dynamo/eval_frame.py", line 209, in _fn
    return fn(*args, **kwargs)
  File "/root/miniconda3/lib/python3.10/site-packages/torch/fx/_symbolic_trace.py", line 778, in trace
    (self.create_arg(fn(*args)),),
  File "/root/miniconda3/lib/python3.10/site-packages/torch/fx/_symbolic_trace.py", line 652, in flatten_fn
    tree_out = root_fn(*tree_args)
  File "/root/miniconda3/lib/python3.10/site-packages/torch/fx/experimental/proxy_tensor.py", line 459, in wrapped
    out = f(*tensors)
  File "/root/miniconda3/lib/python3.10/site-packages/torch/_functorch/aot_autograd.py", line 1158, in traced_joint
    return functionalized_f_helper(primals, tangents)
  File "/root/miniconda3/lib/python3.10/site-packages/torch/_functorch/aot_autograd.py", line 1110, in functionalized_f_helper
    f_outs = flat_fn_no_input_mutations(fn, f_primals, f_tangents, meta, keep_input_mutations)
  File "/root/miniconda3/lib/python3.10/site-packages/torch/_functorch/aot_autograd.py", line 1078, in flat_fn_no_input_mutations
    outs = flat_fn_with_synthetic_bases_expanded(fn, primals, primals_after_cloning, maybe_tangents, meta, keep_input_mutations)
  File "/root/miniconda3/lib/python3.10/site-packages/torch/_functorch/aot_autograd.py", line 1050, in flat_fn_with_synthetic_bases_expanded
    outs = forward_or_joint(fn, primals_before_cloning, primals, maybe_tangents, meta, keep_input_mutations)
  File "/root/miniconda3/lib/python3.10/site-packages/torch/_functorch/aot_autograd.py", line 1019, in forward_or_joint
    backward_out = torch.autograd.grad(
  File "/root/miniconda3/lib/python3.10/site-packages/torch/autograd/__init__.py", line 269, in grad
    return handle_torch_function(
  File "/root/miniconda3/lib/python3.10/site-packages/torch/overrides.py", line 1534, in handle_torch_function
    result = mode.__torch_function__(public_api, types, args, kwargs)
  File "/root/miniconda3/lib/python3.10/site-packages/torch/_inductor/overrides.py", line 38, in __torch_function__
    return func(*args, **kwargs)
  File "/root/miniconda3/lib/python3.10/site-packages/torch/autograd/__init__.py", line 303, in grad
    return Variable._execution_engine.run_backward(  # Calls into the C++ engine to run the backward pass
  File "/root/miniconda3/lib/python3.10/site-packages/torch/utils/checkpoint.py", line 410, in unpack
    with torch.random.fork_rng(devices=rng_devices, enabled=preserve_rng_state):
  File "/root/miniconda3/lib/python3.10/contextlib.py", line 153, in __exit__
    self.gen.throw(typ, value, traceback)
  File "/root/miniconda3/lib/python3.10/site-packages/torch/random.py", line 137, in fork_rng
    torch.cuda.set_rng_state(gpu_rng_state, device)
  File "/root/miniconda3/lib/python3.10/site-packages/torch/cuda/random.py", line 51, in set_rng_state
    new_state_copy = new_state.clone(memory_format=torch.contiguous_format)
  File "/root/miniconda3/lib/python3.10/site-packages/torch/utils/_stats.py", line 20, in wrapper
    return fn(*args, **kwargs)
  File "/root/miniconda3/lib/python3.10/site-packages/torch/fx/experimental/proxy_tensor.py", line 487, in __torch_dispatch__
    return self.inner_torch_dispatch(func, types, args, kwargs)
  File "/root/miniconda3/lib/python3.10/site-packages/torch/fx/experimental/proxy_tensor.py", line 512, in inner_torch_dispatch
    out = proxy_call(self, func, args, kwargs)
  File "/root/miniconda3/lib/python3.10/site-packages/torch/fx/experimental/proxy_tensor.py", line 345, in proxy_call
    out = func(*args, **kwargs)
  File "/root/miniconda3/lib/python3.10/site-packages/torch/_ops.py", line 287, in __call__
    return self._op(*args, **kwargs or {})
  File "/root/miniconda3/lib/python3.10/site-packages/torch/utils/_stats.py", line 20, in wrapper
    return fn(*args, **kwargs)
  File "/root/miniconda3/lib/python3.10/site-packages/torch/_subclasses/fake_tensor.py", line 987, in __torch_dispatch__
    return self.dispatch(func, types, args, kwargs)
  File "/root/miniconda3/lib/python3.10/site-packages/torch/_subclasses/fake_tensor.py", line 1066, in dispatch
    args, kwargs = self.validate_and_convert_non_fake_tensors(
  File "/root/miniconda3/lib/python3.10/site-packages/torch/_subclasses/fake_tensor.py", line 1220, in validate_and_convert_non_fake_tensors
    return tree_map_only(
  File "/root/miniconda3/lib/python3.10/site-packages/torch/utils/_pytree.py", line 266, in tree_map_only
    return tree_map(map_only(ty)(fn), pytree)
  File "/root/miniconda3/lib/python3.10/site-packages/torch/utils/_pytree.py", line 196, in tree_map
    return tree_unflatten([fn(i) for i in flat_args], spec)
  File "/root/miniconda3/lib/python3.10/site-packages/torch/utils/_pytree.py", line 196, in <listcomp>
    return tree_unflatten([fn(i) for i in flat_args], spec)
  File "/root/miniconda3/lib/python3.10/site-packages/torch/utils/_pytree.py", line 247, in inner
    return f(x)
  File "/root/miniconda3/lib/python3.10/site-packages/torch/_subclasses/fake_tensor.py", line 1212, in validate
    raise Exception(
Exception: Please convert all Tensors to FakeTensors first or instantiate FakeTensorMode with 'allow_non_fake_inputs'.
Found in aten.clone.default(*(tensor([255, 255, 255, ..., 82, 191, 1, 0, 0, 0, 0, 0, 72, 3, 0, 0, 0, 0, 0, 0], dtype=torch.uint8),), **{'memory_format': torch.contiguous_format})

The above exception was the direct cause of the following exception:

Traceback (most recent call last):
  File "/root/miniconda3/lib/python3.10/site-packages/torch/_dynamo/convert_frame.py", line 324, in _compile
    out_code = transform_code_object(code, transform)
  File "/root/miniconda3/lib/python3.10/site-packages/torch/_dynamo/bytecode_transformation.py", line 445, in transform_code_object
    transformations(instructions, code_options)
  File "/root/miniconda3/lib/python3.10/site-packages/torch/_dynamo/convert_frame.py", line 311, in transform
    tracer.run()
  File "/root/miniconda3/lib/python3.10/site-packages/torch/_dynamo/symbolic_convert.py", line 1726, in run
    super().run()
  File "/root/miniconda3/lib/python3.10/site-packages/torch/_dynamo/symbolic_convert.py", line 576, in run
    and self.step()
  File "/root/miniconda3/lib/python3.10/site-packages/torch/_dynamo/symbolic_convert.py", line 540, in step
    getattr(self, inst.opname)(inst)
  File "/root/miniconda3/lib/python3.10/site-packages/torch/_dynamo/symbolic_convert.py", line 1792, in RETURN_VALUE
    self.output.compile_subgraph(
  File "/root/miniconda3/lib/python3.10/site-packages/torch/_dynamo/output_graph.py", line 517, in compile_subgraph
    self.compile_and_call_fx_graph(tx, list(reversed(stack_values)), root)
  File "/root/miniconda3/lib/python3.10/site-packages/torch/_dynamo/output_graph.py", line 588, in compile_and_call_fx_graph
    compiled_fn = self.call_user_compiler(gm)
  File "/root/miniconda3/lib/python3.10/site-packages/torch/_dynamo/utils.py", line 163, in time_wrapper
    r = func(*args, **kwargs)
  File "/root/miniconda3/lib/python3.10/site-packages/torch/_dynamo/output_graph.py", line 675, in call_user_compiler
    raise BackendCompilerFailed(self.compiler_fn, e) from e
torch._dynamo.exc.BackendCompilerFailed: debug_wrapper raised Exception: Please convert all Tensors to FakeTensors first or instantiate FakeTensorMode with 'allow_non_fake_inputs'.
Found in aten.clone.default(*(tensor([255, 255, 255, ..., 82, 191, 1, 0, 0, 0, 0, 0, 72, 3, 0, 0, 0, 0, 0, 0], dtype=torch.uint8),), **{'memory_format': torch.contiguous_format})

Traceback (most recent call last):
  File "/root/miniconda3/lib/python3.10/runpy.py", line 196, in _run_module_as_main
    return _run_code(code, main_globals, None,
  File "/root/miniconda3/lib/python3.10/runpy.py", line 86, in _run_code
    exec(code, run_globals)
  File "/root/HCP-Diffusion/hcpdiff/train_ac_single.py", line 48, in <module>
    trainer.train()
  File "/root/HCP-Diffusion/hcpdiff/train_ac.py", line 405, in train
    loss = self.train_one_step(data_list)
  File "/root/HCP-Diffusion/hcpdiff/train_ac.py", line 488, in train_one_step
    self.accelerator.backward(loss)
  File "/root/miniconda3/lib/python3.10/site-packages/accelerate/accelerator.py", line 1983, in backward
    self.scaler.scale(loss).backward(**kwargs)
  File "/root/miniconda3/lib/python3.10/site-packages/torch/_tensor.py", line 487, in backward
    torch.autograd.backward(
  File "/root/miniconda3/lib/python3.10/site-packages/torch/autograd/__init__.py", line 200, in backward
    Variable._execution_engine.run_backward(  # Calls into the C++ engine to run the backward pass
  File "/root/miniconda3/lib/python3.10/site-packages/torch/utils/checkpoint.py", line 423, in unpack
    raise RuntimeError(
RuntimeError: Attempt to retrieve a tensor saved by autograd multiple times without checkpoint recomputation being triggered in between, this is not currently supported. Please open an issue with details on your use case so that we can prioritize adding this.

Traceback (most recent call last):
  File "/root/miniconda3/bin/accelerate", line 8, in <module>
    sys.exit(main())
  File "/root/miniconda3/lib/python3.10/site-packages/accelerate/commands/accelerate_cli.py", line 47, in main
    args.func(args)
  File "/root/miniconda3/lib/python3.10/site-packages/accelerate/commands/launch.py", line 986, in launch_command
    simple_launcher(args)
  File "/root/miniconda3/lib/python3.10/site-packages/accelerate/commands/launch.py", line 628, in simple_launcher
    raise subprocess.CalledProcessError(returncode=process.returncode, cmd=cmd)
subprocess.CalledProcessError: Command '['/root/miniconda3/bin/python', '-m', 'hcpdiff.train_ac_single', '--cfg', 'cfgs/train/examples/lora_anime_character.yaml', 'character_name=surtr_arknights', 'dataset_dir=data/surtr_dataset']' returned non-zero exit status 1.
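
For context, the tensor being cloned in the exception is the CUDA RNG state that gradient checkpointing saves and restores (see the fork_rng / set_rng_state frames above), i.e. a real tensor ending up inside a FakeTensor trace. The snippet below only illustrates the constraint the message describes; it pokes the private torch._subclasses API on the torch 2.0-era code path shown in this traceback, other versions may behave differently, and it is not a fix for HCP-Diffusion.

```python
import torch
from torch._subclasses.fake_tensor import FakeTensorMode

# A real (non-fake) tensor, standing in for the uint8 CUDA RNG state above.
real = torch.zeros(816, dtype=torch.uint8)

with FakeTensorMode():
    try:
        # Dispatching an op on a real tensor inside the mode is rejected.
        real.clone(memory_format=torch.contiguous_format)
    except Exception as e:
        print(e)  # "Please convert all Tensors to FakeTensors first ..."

with FakeTensorMode(allow_non_fake_inputs=True):
    # With the flag set, the real tensor is converted to a FakeTensor instead.
    out = real.clone(memory_format=torch.contiguous_format)
    print(type(out))
```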

IrisRainbowNeko commented 11 months ago

Since LoRA uses the hook mechanism, torch dynamo (torch.compile) is not supported yet.
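
A rough sketch of the pattern being referred to, with made-up names (ToyBlock, attach_lora_hook) rather than the actual HCP-Diffusion API: the LoRA update is added through a forward hook, which works fine in eager mode, and it is the combination of that hook-patched model with torch.compile that is not supported yet. In practice that means leaving torch.compile disabled for LoRA training.

```python
import torch
import torch.nn as nn

class ToyBlock(nn.Module):
    """Stand-in for a frozen base-model layer."""
    def __init__(self, dim: int = 8):
        super().__init__()
        self.proj = nn.Linear(dim, dim)

    def forward(self, x):
        return self.proj(x)

def attach_lora_hook(linear: nn.Linear, rank: int = 4):
    """Add a low-rank update to the layer's output via a forward hook."""
    lora_down = nn.Parameter(torch.randn(linear.in_features, rank) * 0.01)
    lora_up = nn.Parameter(torch.zeros(rank, linear.out_features))

    def hook(_module, inputs, output):
        # Returning a value from a forward hook replaces the layer output.
        return output + inputs[0] @ lora_down @ lora_up

    handle = linear.register_forward_hook(hook)
    return handle, (lora_down, lora_up)

block = ToyBlock()
handle, lora_params = attach_lora_hook(block.proj)

x = torch.randn(2, 8)
out = block(x)                     # eager mode: the hook is applied as expected
# compiled = torch.compile(block)  # per the comment above, tracing the
# compiled(x)                      # hook-patched model is what is unsupported
```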