Traceback (most recent call last):
File "main.py", line 737, in <module>
main(args)
File "main.py", line 654, in main
train_stats = train_one_epoch(
File "/home/work/.tubedetr/engine.py", line 67, in train_one_epoch
memory_cache = model(
File "/home/work/.conda/envs/tubedetr/lib/python3.8/site-packages/torch/nn/modules/module.py", line 1501, in _call_impl
return forward_call(*args, **kwargs)
File "/home/work/.conda/envs/tubedetr/lib/python3.8/site-packages/torch/nn/parallel/distributed.py", line 1156, in forward
output = self._run_ddp_forward(*inputs, **kwargs)
File "/home/work/.conda/envs/tubedetr/lib/python3.8/site-packages/torch/nn/parallel/distributed.py", line 1110, in _run_ddp_forward
return module_to_run(*inputs[0], **kwargs[0]) # type: ignore[index]
File "/home/work/.conda/envs/tubedetr/lib/python3.8/site-packages/torch/nn/modules/module.py", line 1501, in _call_impl
return forward_call(*args, **kwargs)
File "/home/work/.tubedetr/models/tubedetr.py", line 180, in forward
tpad_src = tpad_src.view(b * n_clips, f, h, w)
RuntimeError: shape '[114, 256, 5, 8]' is invalid for input of size 634880
How can I fix this tensor size mismatch? The `view` call expects shape `[114, 256, 5, 8]` (1,167,360 elements), but the input tensor only has 634,880 elements — how should I reconcile `b * n_clips`, `f`, `h`, and `w` with the tensor's actual size?