dreamquark-ai / tabnet

PyTorch implementation of the TabNet paper: https://arxiv.org/pdf/1908.07442.pdf
https://dreamquark-ai.github.io/tabnet/
MIT License

IndexError: index out of range in self #474

Closed: yuanjie-ai closed this issue 1 year ago

yuanjie-ai commented 1 year ago


IndexError                                Traceback (most recent call last)
Cell In[235], line 1
----> 1 clf.fit(
      2     X, y
      3     # eval_set=[(X_train, y_train), (X_test, y_test)]
      4 )

File /opt/homebrew/Caskroom/miniforge/base/envs/py38/lib/python3.8/site-packages/pytorch_tabnet/abstract_model.py:241, in TabModel.fit(self, X_train, y_train, eval_set, eval_name, eval_metric, loss_fn, weights, max_epochs, patience, batch_size, virtual_batch_size, num_workers, drop_last, callbacks, pin_memory, from_unsupervised, warm_start, augmentations)
    236 for epoch_idx in range(self.max_epochs):
    237 
    238     # Call method on_epoch_begin for all callbacks
    239     self._callback_container.on_epoch_begin(epoch_idx)
--> 241     self._train_epoch(train_dataloader)
    243 # Apply predict epoch to all eval sets
    244 for eval_name, valid_dataloader in zip(eval_names, valid_dataloaders):

File /opt/homebrew/Caskroom/miniforge/base/envs/py38/lib/python3.8/site-packages/pytorch_tabnet/abstract_model.py:457, in TabModel._train_epoch(self, train_loader)
    454 for batch_idx, (X, y) in enumerate(train_loader):
    455     self._callback_container.on_batch_begin(batch_idx)
--> 457     batch_logs = self._train_batch(X, y)
    459     self._callback_container.on_batch_end(batch_idx, batch_logs)
    461 epoch_logs = {"lr": self._optimizer.param_groups[-1]["lr"]}

File /opt/homebrew/Caskroom/miniforge/base/envs/py38/lib/python3.8/site-packages/pytorch_tabnet/abstract_model.py:495, in TabModel._train_batch(self, X, y)
    492 for param in self.network.parameters():
    493     param.grad = None
--> 495 output, M_loss = self.network(X)
    497 loss = self.compute_loss(output, y)
    498 # Add the overall sparsity loss

File /opt/homebrew/Caskroom/miniforge/base/envs/py38/lib/python3.8/site-packages/torch/nn/modules/module.py:1194, in Module._call_impl(self, *input, **kwargs)
   1190 # If we don't have any hooks, we want to skip the rest of the logic in
   1191 # this function, and just call forward.
   1192 if not (self._backward_hooks or self._forward_hooks or self._forward_pre_hooks or _global_backward_hooks
   1193         or _global_forward_hooks or _global_forward_pre_hooks):
-> 1194     return forward_call(*input, **kwargs)
   1195 # Do not call functions when jit is used
   1196 full_backward_hooks, non_full_backward_hooks = [], []

File /opt/homebrew/Caskroom/miniforge/base/envs/py38/lib/python3.8/site-packages/pytorch_tabnet/tab_network.py:585, in TabNet.forward(self, x)
    584 def forward(self, x):
--> 585     x = self.embedder(x)
    586     return self.tabnet(x)

File /opt/homebrew/Caskroom/miniforge/base/envs/py38/lib/python3.8/site-packages/torch/nn/modules/module.py:1194, in Module._call_impl(self, *input, **kwargs)
   1190 # If we don't have any hooks, we want to skip the rest of the logic in
   1191 # this function, and just call forward.
   1192 if not (self._backward_hooks or self._forward_hooks or self._forward_pre_hooks or _global_backward_hooks
   1193         or _global_forward_hooks or _global_forward_pre_hooks):
-> 1194     return forward_call(*input, **kwargs)
   1195 # Do not call functions when jit is used
   1196 full_backward_hooks, non_full_backward_hooks = [], []

File /opt/homebrew/Caskroom/miniforge/base/envs/py38/lib/python3.8/site-packages/pytorch_tabnet/tab_network.py:861, in EmbeddingGenerator.forward(self, x)
    858     cols.append(x[:, feat_init_idx].float().view(-1, 1))
    859 else:
    860     cols.append(
--> 861         self.embeddings[cat_feat_counter](x[:, feat_init_idx].long())
    862     )
    863     cat_feat_counter += 1
    864 # concat

File /opt/homebrew/Caskroom/miniforge/base/envs/py38/lib/python3.8/site-packages/torch/nn/modules/module.py:1194, in Module._call_impl(self, *input, **kwargs)
   1190 # If we don't have any hooks, we want to skip the rest of the logic in
   1191 # this function, and just call forward.
   1192 if not (self._backward_hooks or self._forward_hooks or self._forward_pre_hooks or _global_backward_hooks
   1193         or _global_forward_hooks or _global_forward_pre_hooks):
-> 1194     return forward_call(*input, **kwargs)
   1195 # Do not call functions when jit is used
   1196 full_backward_hooks, non_full_backward_hooks = [], []

File /opt/homebrew/Caskroom/miniforge/base/envs/py38/lib/python3.8/site-packages/torch/nn/modules/sparse.py:160, in Embedding.forward(self, input)
    159 def forward(self, input: Tensor) -> Tensor:
--> 160     return F.embedding(
    161         input, self.weight, self.padding_idx, self.max_norm,
    162         self.norm_type, self.scale_grad_by_freq, self.sparse)

File /opt/homebrew/Caskroom/miniforge/base/envs/py38/lib/python3.8/site-packages/torch/nn/functional.py:2210, in embedding(input, weight, padding_idx, max_norm, norm_type, scale_grad_by_freq, sparse)
   2204     # Note [embedding_renorm set_grad_enabled]
   2205     # XXX: equivalent to
   2206     # with torch.no_grad():
   2207     #   torch.embedding_renorm_
   2208     # remove once script supports set_grad_enabled
   2209     _no_grad_embedding_renorm_(weight, input, max_norm, norm_type)
-> 2210 return torch.embedding(weight, input, padding_idx, scale_grad_by_freq, sparse)

IndexError: index out of range in self
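For context, the final frame is plain torch.nn.Embedding: it raises exactly this IndexError whenever an input index falls outside the range [0, num_embeddings). A minimal standalone reproduction, independent of tabnet:

```python
import torch
import torch.nn as nn

# An embedding table with num_embeddings=3 only accepts indices 0, 1, 2.
emb = nn.Embedding(num_embeddings=3, embedding_dim=4)

print(emb(torch.tensor([0, 1, 2])).shape)  # fine: torch.Size([3, 4])
emb(torch.tensor([3]))  # IndexError: index out of range in self
```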

Optimox commented 1 year ago

Please share a minimal reproducible example and more information. This is most probably not a bug but an error on your side, as the embeddings are tested in the CI.
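The embedding tables built by EmbeddingGenerator are sized from the cat_dims argument, so this error usually means a categorical column contains a value greater than or equal to its cat_dims entry, for example an unseen category or a column that was never label-encoded to 0..n-1. A quick sanity check you can run before calling fit, assuming X is the numpy array passed to fit and cat_idxs/cat_dims are the arguments given to the model constructor:

```python
import numpy as np

# For each declared categorical column, all values must lie in
# [0, cat_dim); otherwise the corresponding nn.Embedding lookup fails.
for idx, dim in zip(cat_idxs, cat_dims):
    col = X[:, idx].astype(np.int64)
    if col.min() < 0 or col.max() >= dim:
        print(f"column {idx}: values span [{col.min()}, {col.max()}], "
              f"but its cat_dims entry is only {dim}")
```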

Optimox commented 1 year ago

Please feel free to reopen once you have more information to share.