File "E:\work\code\Car_recognition-master\train.py", line 521, in
train(hyp, opt, device, tb_writer, wandb)
File "E:\work\code\Car_recognition-master\train.py", line 291, in train
loss, loss_items = compute_loss(pred, targets.to(device), model) # loss scaled by batch_size
File "E:\work\code\Car_recognition-master\utils\loss.py", line 163, in compute_loss
lcls += BCEcls(ps[:, 13:], t) # BCE
File "E:\soft1\conda\lib\site-packages\torch\nn\modules\module.py", line 1190, in _call_impl
return forward_call(*input, **kwargs)
File "E:\soft1\conda\lib\site-packages\torch\nn\modules\loss.py", line 720, in forward
return F.binary_cross_entropy_with_logits(input, target,
File "E:\soft1\conda\lib\site-packages\torch\nn\functional.py", line 3162, in binary_cross_entropy_with_logits
return torch.binary_cross_entropy_with_logits(input, target, weight, pos_weight, reduction_enum)
File "E:\soft1\conda\lib\site-packages\torch\fx\traceback.py", line 57, in format_stack
return traceback.format_stack()
(Triggered internally at C:\actions-runner\_work\pytorch\pytorch\builder\windows\pytorch\torch\csrc\autograd\python_anomaly_mode.cpp:119.)
Variable._execution_engine.run_backward( # Calls into the C++ engine to run the backward pass
0%| | 0/24 [00:00<?, ?it/s]
Traceback (most recent call last):
File "E:\work\code\Car_recognition-master\train.py", line 521, in
train(hyp, opt, device, tb_writer, wandb)
File "E:\work\code\Car_recognition-master\train.py", line 296, in train
scaler.scale(loss).backward()
File "E:\soft1\conda\lib\site-packages\torch_tensor.py", line 487, in backward
torch.autograd.backward(
File "E:\soft1\conda\lib\site-packages\torch\autograd__init__.py", line 197, in backward
Variable._execution_engine.run_backward( # Calls into the C++ engine to run the backward pass
RuntimeError: one of the variables needed for gradient computation has been modified by an inplace operation: [torch.FloatTensor [30, 2]], which is output 0 of AsStridedBackward0, is at version 4; expected version 0 instead. Hint: the backtrace further above shows the operation that failed to compute its gradient. The variable in question was changed in there or anywhere later. Good luck!
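
The failing tensor is the [30, 2] class-logits slice ps[:, 13:] that the BCE-with-logits loss saves for its backward pass; "is at version 4" means the underlying ps was written in place four times after that save. Below is a minimal sketch of that pattern and a non-inplace rewrite, assuming utils/loss.py follows the usual yolov5-face layout (box 0:4, objectness 4, eight landmark coordinates 5:13, class logits 13:) and scales the predicted landmarks by assigning into a view of ps. The names raw, ps, anchors, plandmarks and the exact slices are illustrative assumptions, not code copied from the repository.

import torch
import torch.nn.functional as F

torch.manual_seed(0)

raw = torch.randn(30, 15, requires_grad=True)  # 30 positive samples, 15 channels (2 classes)
ps = raw * 1.0                                 # non-leaf tensor, like the indexed predictions in compute_loss
anchors = torch.rand(30, 2)                    # per-sample anchor pair; matches the [30, 2] in the error
t = torch.zeros(30, 2)                         # class targets for the BCE term

# 1) The classification loss saves its input, the view ps[:, 13:], for backward.
lcls = F.binary_cross_entropy_with_logits(ps[:, 13:], t)

# 2) Suspected culprit: in-place landmark scaling. Each assignment writes through a
#    view into ps, bumping its version counter 0 -> 4 ("is at version 4").
plandmarks = ps[:, 5:13]
plandmarks[:, 0:2] = plandmarks[:, 0:2] * anchors
plandmarks[:, 2:4] = plandmarks[:, 2:4] * anchors
plandmarks[:, 4:6] = plandmarks[:, 4:6] * anchors
plandmarks[:, 6:8] = plandmarks[:, 6:8] * anchors

try:
    lcls.backward()        # fails: the saved view is now at version 4, expected 0
except RuntimeError as e:
    print("reproduced:", e)

# Fix: build the scaled landmarks without mutating ps. repeat(1, 4) tiles the two
# anchor values across the four keypoint (x, y) pairs, so the result matches the
# in-place version while ps (and the slice saved by the BCE) stays untouched.
ps2 = raw * 1.0
lcls2 = F.binary_cross_entropy_with_logits(ps2[:, 13:], t)
plandmarks2 = ps2[:, 5:13] * anchors.repeat(1, 4)
(lcls2 + plandmarks2.sum()).backward()         # backward now succeeds
print(raw.grad.shape)                          # torch.Size([30, 15])

If the actual loss.py differs from this sketch, a narrower workaround with the same effect is to pass a copy of the logits slice to the classification loss, e.g. BCEcls(ps[:, 13:].clone(), t): the clone has its own storage and version counter, so later in-place writes to ps no longer invalidate what the loss saved for backward.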
File "E:\work\code\Car_recognition-master\train.py", line 521, in
train(hyp, opt, device, tb_writer, wandb)
File "E:\work\code\Car_recognition-master\train.py", line 291, in train
loss, loss_items = compute_loss(pred, targets.to(device), model) # loss scaled by batch_size
File "E:\work\code\Car_recognition-master\utils\loss.py", line 163, in compute_loss
lcls += BCEcls(ps[:, 13:], t) # BCE
File "E:\soft1\conda\lib\site-packages\torch\nn\modules\module.py", line 1190, in _call_impl
return forward_call(*input, **kwargs)
File "E:\soft1\conda\lib\site-packages\torch\nn\modules\loss.py", line 720, in forward
return F.binary_cross_entropy_with_logits(input, target,
File "E:\soft1\conda\lib\site-packages\torch\nn\functional.py", line 3162, in binary_cross_entropy_with_logits
return torch.binary_cross_entropy_with_logits(input, target, weight, pos_weight, reduction_enum)
File "E:\soft1\conda\lib\site-packages\torch\fx\traceback.py", line 57, in format_stack
return traceback.format_stack()
(Triggered internally at C:\actions-runner_work\pytorch\pytorch\builder\windows\pytorch\torch\csrc\autograd\python_anomaly_mode.cpp:119.)
Variable._execution_engine.run_backward( # Calls into the C++ engine to run the backward pass
0%| | 0/24 [00:00<?, ?it/s]
Traceback (most recent call last):
File "E:\work\code\Car_recognition-master\train.py", line 521, in
train(hyp, opt, device, tb_writer, wandb)
File "E:\work\code\Car_recognition-master\train.py", line 296, in train
scaler.scale(loss).backward()
File "E:\soft1\conda\lib\site-packages\torch_tensor.py", line 487, in backward
torch.autograd.backward(
File "E:\soft1\conda\lib\site-packages\torch\autograd__init__.py", line 197, in backward
Variable._execution_engine.run_backward( # Calls into the C++ engine to run the backward pass
RuntimeError: one of the variables needed for gradient computation has been modified by an inplace operation: [torch.FloatTensor [30, 2]], which is output 0 of AsStridedBackward0, is at
version 4; expected version 0 instead. Hint: the backtrace further above shows the operation that failed to compute its gradient. The variable in question was changed in there or anywhere later. Good luck!