Traceback (most recent call last):
File "e:\git clone download\BERT-BILSTM-CRF\main.py", line 190, in <module>
main(data_name)
File "e:\git clone download\BERT-BILSTM-CRF\main.py", line 182, in main
train.train()
File "e:\git clone download\BERT-BILSTM-CRF\main.py", line 52, in train
output = self.model(input_ids, attention_mask, labels)
File "E:\miniconda3\envs\cyclegan\lib\site-packages\torch\nn\modules\module.py", line 1102, in _call_impl
return forward_call(*input, **kwargs)
File "e:\git clone download\BERT-BILSTM-CRF\model.py", line 27, in forward
bert_output = self.bert(input_ids=input_ids, attention_mask=attention_mask)
File "E:\miniconda3\envs\cyclegan\lib\site-packages\torch\nn\modules\module.py", line 1102, in _call_impl
return forward_call(*input, **kwargs)
File "E:\miniconda3\envs\cyclegan\lib\site-packages\transformers\models\bert\modeling_bert.py", line 1013, in forward
encoder_outputs = self.encoder(
File "E:\miniconda3\envs\cyclegan\lib\site-packages\torch\nn\modules\module.py", line 1102, in _call_impl
return forward_call(*input, **kwargs)
File "E:\miniconda3\envs\cyclegan\lib\site-packages\transformers\models\bert\modeling_bert.py", line 607, in forward
layer_outputs = layer_module(
File "E:\miniconda3\envs\cyclegan\lib\site-packages\torch\nn\modules\module.py", line 1102, in _call_impl
return forward_call(*input, **kwargs)
File "E:\miniconda3\envs\cyclegan\lib\site-packages\transformers\models\bert\modeling_bert.py", line 497, in forward
self_attention_outputs = self.attention(
File "E:\miniconda3\envs\cyclegan\lib\site-packages\torch\nn\modules\module.py", line 1102, in _call_impl
return forward_call(*input, **kwargs)
File "E:\miniconda3\envs\cyclegan\lib\site-packages\transformers\models\bert\modeling_bert.py", line 427, in forward
self_outputs = self.self(
File "E:\miniconda3\envs\cyclegan\lib\site-packages\torch\nn\modules\module.py", line 1102, in _call_impl
return forward_call(*input, **kwargs)
File "E:\miniconda3\envs\cyclegan\lib\site-packages\transformers\models\bert\modeling_bert.py", line 286, in forward
mixed_query_layer = self.query(hidden_states)
File "E:\miniconda3\envs\cyclegan\lib\site-packages\torch\nn\modules\module.py", line 1102, in _call_impl
return forward_call(*input, **kwargs)
File "E:\miniconda3\envs\cyclegan\lib\site-packages\torch\nn\modules\linear.py", line 103, in forward
return F.linear(input, self.weight, self.bias)
File "E:\miniconda3\envs\cyclegan\lib\site-packages\torch\nn\functional.py", line 1848, in linear
return torch._C._nn.linear(input, weight, bias)
RuntimeError: CUDA error: CUBLAS_STATUS_NOT_INITIALIZED when calling cublasCreate(handle)
请问这个问题该怎么解决呢?
Traceback (most recent call last):
File "e:\git clone download\BERT-BILSTM-CRF\main.py", line 190, in <module>
main(data_name)
File "e:\git clone download\BERT-BILSTM-CRF\main.py", line 182, in main
train.train()
File "e:\git clone download\BERT-BILSTM-CRF\main.py", line 52, in train
output = self.model(input_ids, attention_mask, labels)
File "E:\miniconda3\envs\cyclegan\lib\site-packages\torch\nn\modules\module.py", line 1102, in _call_impl
return forward_call(*input, **kwargs)
File "e:\git clone download\BERT-BILSTM-CRF\model.py", line 27, in forward
bert_output = self.bert(input_ids=input_ids, attention_mask=attention_mask)
File "E:\miniconda3\envs\cyclegan\lib\site-packages\torch\nn\modules\module.py", line 1102, in _call_impl
return forward_call(*input, **kwargs)
File "E:\miniconda3\envs\cyclegan\lib\site-packages\transformers\models\bert\modeling_bert.py", line 1013, in forward
encoder_outputs = self.encoder(
File "E:\miniconda3\envs\cyclegan\lib\site-packages\torch\nn\modules\module.py", line 1102, in _call_impl
return forward_call(*input, **kwargs)
File "E:\miniconda3\envs\cyclegan\lib\site-packages\transformers\models\bert\modeling_bert.py", line 607, in forward
layer_outputs = layer_module(
File "E:\miniconda3\envs\cyclegan\lib\site-packages\torch\nn\modules\module.py", line 1102, in _call_impl
return forward_call(*input, **kwargs)
File "E:\miniconda3\envs\cyclegan\lib\site-packages\transformers\models\bert\modeling_bert.py", line 497, in forward
self_attention_outputs = self.attention(
File "E:\miniconda3\envs\cyclegan\lib\site-packages\torch\nn\modules\module.py", line 1102, in _call_impl
return forward_call(*input, **kwargs)
File "E:\miniconda3\envs\cyclegan\lib\site-packages\transformers\models\bert\modeling_bert.py", line 427, in forward
self_outputs = self.self(
File "E:\miniconda3\envs\cyclegan\lib\site-packages\torch\nn\modules\module.py", line 1102, in _call_impl
return forward_call(*input, **kwargs)
File "E:\miniconda3\envs\cyclegan\lib\site-packages\transformers\models\bert\modeling_bert.py", line 286, in forward
mixed_query_layer = self.query(hidden_states)
File "E:\miniconda3\envs\cyclegan\lib\site-packages\torch\nn\modules\module.py", line 1102, in _call_impl
return forward_call(*input, **kwargs)
File "E:\miniconda3\envs\cyclegan\lib\site-packages\torch\nn\modules\linear.py", line 103, in forward
return F.linear(input, self.weight, self.bias)
File "E:\miniconda3\envs\cyclegan\lib\site-packages\torch\nn\functional.py", line 1848, in linear
return torch._C._nn.linear(input, weight, bias)
RuntimeError: CUDA error: CUBLAS_STATUS_NOT_INITIALIZED when calling cublasCreate(handle)
请问这个问题该怎么解决呢?