ajhalthor / Transformer-Neural-Network

Code Transformer neural network components piece by piece
MIT License

IndexError: index out of range in self #9

Open · aahilalwani25 opened this issue 6 months ago

aahilalwani25 commented 6 months ago

When I convert from English to Roman Urdu, it gives this error:

```
---------------------------------------------------------------------------
IndexError                                Traceback (most recent call last)
Cell In[21], line 14
     12 encoder_self_attention_mask, decoder_self_attention_mask, decoder_cross_attention_mask = create_masks(eng_batch, ur_batch)
     13 optim.zero_grad()
---> 14 ur_predictions = transformer(eng_batch,
     15                              ur_batch,
     16                              encoder_self_attention_mask.to(device),
     17                              decoder_self_attention_mask.to(device),
     18                              decoder_cross_attention_mask.to(device),
     19                              enc_start_token=False,
     20                              enc_end_token=False,
     21                              dec_start_token=True,
     22                              dec_end_token=True)
     23 labels = transformer.decoder.sentence_embedding.batch_tokenize(ur_batch, start_token=False, end_token=True)
     24 loss = criterian(
     25     ur_predictions.view(-1, ur_vocab_size).to(device),
     26     labels.view(-1).to(device)
     27 ).to(device)

File c:\Users\AAHIL ALWANI\Desktop\FYP\Web App\Meme_AI\meme\Lib\site-packages\torch\nn\modules\module.py:1511, in Module._wrapped_call_impl(self, *args, **kwargs)
   1509     return self._compiled_call_impl(*args, **kwargs)  # type: ignore[misc]
   1510 else:
-> 1511     return self._call_impl(*args, **kwargs)

File c:\Users\AAHIL ALWANI\Desktop\FYP\Web App\Meme_AI\meme\Lib\site-packages\torch\nn\modules\module.py:1520, in Module._call_impl(self, *args, **kwargs)
   1515 # If we don't have any hooks, we want to skip the rest of the logic in
   1516 # this function, and just call forward.
   1517 if not (self._backward_hooks or self._backward_pre_hooks or self._forward_hooks or self._forward_pre_hooks
   1518         or _global_backward_pre_hooks or _global_backward_hooks
   1519         or _global_forward_hooks or _global_forward_pre_hooks):
-> 1520     return forward_call(*args, **kwargs)
   1522 try:
   1523     result = None

File ~\Desktop\FYP\Web App\Meme_AI\transformer model\transformer_model.py:302, in Transformer.forward(self, x, y, encoder_self_attention_mask, decoder_self_attention_mask, decoder_cross_attention_mask, enc_start_token, enc_end_token, dec_start_token, dec_end_token)
    292 def forward(self,
    293             x,
    294             y,
    (...)
    300             dec_start_token=False, # We should make this true
    301             dec_end_token=False): # x, y are batch of sentences
--> 302     x = self.encoder(x, encoder_self_attention_mask, start_token=enc_start_token, end_token=enc_end_token)
    303     out = self.decoder(x, y, decoder_self_attention_mask, decoder_cross_attention_mask, start_token=dec_start_token, end_token=dec_end_token)
    304     out = self.linear(out)

File c:\Users\AAHIL ALWANI\Desktop\FYP\Web App\Meme_AI\meme\Lib\site-packages\torch\nn\modules\module.py:1511, in Module._wrapped_call_impl(self, *args, **kwargs)
   1509     return self._compiled_call_impl(*args, **kwargs)  # type: ignore[misc]
   1510 else:
-> 1511     return self._call_impl(*args, **kwargs)

File c:\Users\AAHIL ALWANI\Desktop\FYP\Web App\Meme_AI\meme\Lib\site-packages\torch\nn\modules\module.py:1520, in Module._call_impl(self, *args, **kwargs)
   1515 # If we don't have any hooks, we want to skip the rest of the logic in
   1516 # this function, and just call forward.
   1517 if not (self._backward_hooks or self._backward_pre_hooks or self._forward_hooks or self._forward_pre_hooks
   1518         or _global_backward_pre_hooks or _global_backward_hooks
   1519         or _global_forward_hooks or _global_forward_pre_hooks):
-> 1520     return forward_call(*args, **kwargs)
   1522 try:
   1523     result = None

File ~\Desktop\FYP\Web App\Meme_AI\transformer model\transformer_model.py:179, in Encoder.forward(self, x, self_attention_mask, start_token, end_token)
    178 def forward(self, x, self_attention_mask, start_token, end_token):
--> 179     x = self.sentence_embedding(x, start_token, end_token)
    180     x = self.layers(x, self_attention_mask)
    181     return x

File c:\Users\AAHIL ALWANI\Desktop\FYP\Web App\Meme_AI\meme\Lib\site-packages\torch\nn\modules\module.py:1511, in Module._wrapped_call_impl(self, *args, **kwargs)
   1509     return self._compiled_call_impl(*args, **kwargs)  # type: ignore[misc]
   1510 else:
-> 1511     return self._call_impl(*args, **kwargs)

File c:\Users\AAHIL ALWANI\Desktop\FYP\Web App\Meme_AI\meme\Lib\site-packages\torch\nn\modules\module.py:1520, in Module._call_impl(self, *args, **kwargs)
   1515 # If we don't have any hooks, we want to skip the rest of the logic in
   1516 # this function, and just call forward.
   1517 if not (self._backward_hooks or self._backward_pre_hooks or self._forward_hooks or self._forward_pre_hooks
   1518         or _global_backward_pre_hooks or _global_backward_hooks
   1519         or _global_forward_hooks or _global_forward_pre_hooks):
-> 1520     return forward_call(*args, **kwargs)
   1522 try:
   1523     result = None

File ~\Desktop\FYP\Web App\Meme_AI\transformer model\transformer_model.py:72, in SentenceEmbedding.forward(self, x, start_token, end_token)
     70 def forward(self, x, start_token, end_token): # sentence
     71     x = self.batch_tokenize(x, start_token, end_token)
---> 72     x = self.embedding(x)
     73     pos = self.position_encoder().to(get_device())
     74     x = self.dropout(x + pos)

File c:\Users\AAHIL ALWANI\Desktop\FYP\Web App\Meme_AI\meme\Lib\site-packages\torch\nn\modules\module.py:1511, in Module._wrapped_call_impl(self, *args, **kwargs)
   1509     return self._compiled_call_impl(*args, **kwargs)  # type: ignore[misc]
   1510 else:
-> 1511     return self._call_impl(*args, **kwargs)

File c:\Users\AAHIL ALWANI\Desktop\FYP\Web App\Meme_AI\meme\Lib\site-packages\torch\nn\modules\module.py:1520, in Module._call_impl(self, *args, **kwargs)
   1515 # If we don't have any hooks, we want to skip the rest of the logic in
   1516 # this function, and just call forward.
   1517 if not (self._backward_hooks or self._backward_pre_hooks or self._forward_hooks or self._forward_pre_hooks
   1518         or _global_backward_pre_hooks or _global_backward_hooks
   1519         or _global_forward_hooks or _global_forward_pre_hooks):
-> 1520     return forward_call(*args, **kwargs)
   1522 try:
   1523     result = None

File c:\Users\AAHIL ALWANI\Desktop\FYP\Web App\Meme_AI\meme\Lib\site-packages\torch\nn\modules\sparse.py:163, in Embedding.forward(self, input)
    162 def forward(self, input: Tensor) -> Tensor:
--> 163     return F.embedding(
    164         input, self.weight, self.padding_idx, self.max_norm,
    165         self.norm_type, self.scale_grad_by_freq, self.sparse)

File c:\Users\AAHIL ALWANI\Desktop\FYP\Web App\Meme_AI\meme\Lib\site-packages\torch\nn\functional.py:2237, in embedding(input, weight, padding_idx, max_norm, norm_type, scale_grad_by_freq, sparse)
   2231     # Note [embedding_renorm set_grad_enabled]
   2232     # XXX: equivalent to
   2233     # with torch.no_grad():
   2234     #   torch.embedding_renorm_
   2235     # remove once script supports set_grad_enabled
   2236     _no_grad_embedding_renorm_(weight, input, max_norm, norm_type)
-> 2237 return torch.embedding(weight, input, padding_idx, scale_grad_by_freq, sparse)

IndexError: index out of range in self
```
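
For context: `IndexError: index out of range in self` raised from `torch.embedding` means the input tensor contains an id that is negative or greater than or equal to the embedding table's `num_embeddings`. In this codebase that means `batch_tokenize` produced a token id outside the vocabulary the `nn.Embedding` was sized for. A minimal diagnostic sketch, assuming only the names visible in the traceback above (`transformer`, `eng_batch`, and the encoder's `SentenceEmbedding`):

```python
# The nn.Embedding that raised, reachable via the modules in the traceback.
emb = transformer.encoder.sentence_embedding.embedding

# Tokenize one batch exactly the way Encoder.forward does.
tokens = transformer.encoder.sentence_embedding.batch_tokenize(
    eng_batch, start_token=False, end_token=False
)

print("max token id in batch:", tokens.max().item())
print("embedding table size :", emb.num_embeddings)

# nn.Embedding requires every id to satisfy 0 <= id < num_embeddings.
assert tokens.max().item() < emb.num_embeddings, \
    "a character in eng_batch has no entry in the source vocabulary"
```

If the assertion fails, the vocabulary passed to the model is missing characters that occur in the batch, or the model was built with a vocab size smaller than the character-to-index mapping.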

QUINDL-TECHNOLOGIES commented 5 months ago

@aahilalwani25 I also tried to solve this issue but couldn't. Have you found a way around it?
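
One common way around it, hedged as a sketch rather than the repo's exact code: derive each vocabulary from the dataset itself, so every character `batch_tokenize` can see has an index, and size the embedding tables with `len(vocab)`. The sentence lists and special-token names below are assumptions modelled on this repo's tutorial notebook:

```python
# Hedged sketch: build the vocabularies from the data so batch_tokenize can
# never emit an id outside the embedding table.
# `english_sentences` / `urdu_sentences` are assumed lists of raw strings.
START_TOKEN, PADDING_TOKEN, END_TOKEN = '<START>', '<PADDING>', '<END>'

urdu_vocab = [START_TOKEN, PADDING_TOKEN, END_TOKEN] + sorted(
    {ch for sentence in urdu_sentences for ch in sentence}
)
english_vocab = [START_TOKEN, PADDING_TOKEN, END_TOKEN] + sorted(
    {ch for sentence in english_sentences for ch in sentence}
)

urdu_to_index = {ch: i for i, ch in enumerate(urdu_vocab)}
index_to_urdu = {i: ch for i, ch in enumerate(urdu_vocab)}
english_to_index = {ch: i for i, ch in enumerate(english_vocab)}

# The model must be constructed with these exact sizes.
ur_vocab_size = len(urdu_vocab)
eng_vocab_size = len(english_vocab)
```

If the model was instantiated with a hard-coded vocab size smaller than `len(urdu_to_index)`, the lookup goes out of range the first time a high-index character appears in a batch.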

Subhasissahoo407 commented 4 months ago

@QUINDL-TECHNOLOGIES @aahilalwani25 @ajhalthor I also face this error when converting from English to Hindi. Has this issue been resolved?
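
A complementary workaround, sketched under the same assumptions (the `hindi_sentences`/`english_sentences` lists and `*_to_index` mappings are hypothetical names): drop sentence pairs containing characters that are not in the vocabulary before batching, so no out-of-range id is ever produced.

```python
def is_valid_tokens(sentence, vocab):
    # A sentence is usable only if every character has an index in the vocab.
    return all(ch in vocab for ch in set(sentence))

# Keep only pairs where both sides are fully covered by their vocabularies.
valid_indices = [
    i for i in range(len(hindi_sentences))
    if is_valid_tokens(hindi_sentences[i], hindi_to_index)
    and is_valid_tokens(english_sentences[i], english_to_index)
]
hindi_sentences = [hindi_sentences[i] for i in valid_indices]
english_sentences = [english_sentences[i] for i in valid_indices]
```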