sktime / pytorch-forecasting

Time series forecasting with PyTorch
https://pytorch-forecasting.readthedocs.io/
MIT License

NHiTS instantiation crashes if n_layers is not the default value #1225

Open fariedabuzaid opened 1 year ago

fariedabuzaid commented 1 year ago

Expected behavior

Changing n_layers in the NHiTS model should increase the number of layers per block.

Actual behavior

Initialization crashes with an IndexError:

---------------------------------------------------------------------------
IndexError                                Traceback (most recent call last)
Cell In [6], line 10
      2 pl.seed_everything(42)
      3 trainer = pl.Trainer(
      4     accelerator='cpu',
      5     max_epochs=5,
      6     logger=CSVLogger(".")
      7 )
---> 10 nhits = NHiTS.from_dataset(
     11     training_nhits,
     12     hidden_size=128,
     13     learning_rate=0.03,
     14     dropout=0.1,  
     15     output_size=7,  # 7 quantiles by default
     16     loss=QuantileLoss(),
     17     n_blocks=[1,1,1],
     18     pooling_sizes = [12, 3, 1],
     19     downsample_frequencies=[12, 3, 1],
     20     reduce_on_plateau_patience=1,
     21     naive_level=False,
     22     pooling_mode="average",
     23     n_layers=3
     24 )
     25 print(f"Number of parameters in network: {nhits.size()/1e3:.1f}k")

File /opt/anaconda3/envs/tfl_training_explainable_ai/lib/python3.9/site-packages/pytorch_forecasting/models/nhits/__init__.py:348, in NHiTS.from_dataset(cls, dataset, **kwargs)
    341 assert (new_kwargs.get("backcast_loss_ratio", 0) == 0) | (
    342     isinstance(new_kwargs["output_size"], int) and new_kwargs["output_size"] == 1
    343 ) or all(
    344     o == 1 for o in new_kwargs["output_size"]
    345 ), "output sizes can only be of size 1, i.e. point forecasts if backcast_loss_ratio > 0"
    347 # initialize class
--> 348 return super().from_dataset(dataset, **new_kwargs)

File /opt/anaconda3/envs/tfl_training_explainable_ai/lib/python3.9/site-packages/pytorch_forecasting/models/base_model.py:1483, in BaseModelWithCovariates.from_dataset(cls, dataset, allowed_encoder_known_variable_names, **kwargs)
   1463 new_kwargs = dict(
   1464     static_categoricals=dataset.static_categoricals,
   1465     time_varying_categoricals_encoder=[
   (...)
   1480     categorical_groups=dataset.variable_groups,
   1481 )
   1482 new_kwargs.update(kwargs)
-> 1483 return super().from_dataset(dataset, **new_kwargs)

File /opt/anaconda3/envs/tfl_training_explainable_ai/lib/python3.9/site-packages/pytorch_forecasting/models/base_model.py:996, in BaseModel.from_dataset(cls, dataset, **kwargs)
    994 if "output_transformer" not in kwargs:
    995     kwargs["output_transformer"] = dataset.target_normalizer
--> 996 net = cls(**kwargs)
    997 net.dataset_parameters = dataset.get_parameters()
    998 if dataset.multi_target:

File /opt/anaconda3/envs/tfl_training_explainable_ai/lib/python3.9/site-packages/pytorch_forecasting/models/nhits/__init__.py:177, in NHiTS.__init__(self, output_size, static_categoricals, static_reals, time_varying_categoricals_encoder, time_varying_categoricals_decoder, categorical_groups, time_varying_reals_encoder, time_varying_reals_decoder, embedding_sizes, embedding_paddings, embedding_labels, x_reals, x_categoricals, context_length, prediction_length, static_hidden_size, naive_level, shared_weights, activation, initialization, n_blocks, n_layers, hidden_size, pooling_sizes, downsample_frequencies, pooling_mode, interpolation_mode, batch_normalization, dropout, learning_rate, log_interval, log_gradient_flow, log_val_interval, weight_decay, loss, reduce_on_plateau_patience, backcast_loss_ratio, logging_metrics, **kwargs)
    168 super().__init__(loss=loss, logging_metrics=logging_metrics, **kwargs)
    170 self.embeddings = MultiEmbedding(
    171     embedding_sizes=self.hparams.embedding_sizes,
    172     categorical_groups=self.hparams.categorical_groups,
    173     embedding_paddings=self.hparams.embedding_paddings,
    174     x_categoricals=self.hparams.x_categoricals,
    175 )
--> 177 self.model = NHiTSModule(
    178     context_length=self.hparams.context_length,
    179     prediction_length=self.hparams.prediction_length,
    180     output_size=to_list(output_size),
    181     static_size=self.static_size,
    182     covariate_size=self.covariate_size,
    183     static_hidden_size=self.hparams.static_hidden_size,
    184     n_blocks=self.hparams.n_blocks,
    185     n_layers=self.hparams.n_layers,
    186     hidden_size=self.n_stacks * [2 * [self.hparams.hidden_size]],
    187     pooling_sizes=self.hparams.pooling_sizes,
    188     downsample_frequencies=self.hparams.downsample_frequencies,
    189     pooling_mode=self.hparams.pooling_mode,
    190     interpolation_mode=self.hparams.interpolation_mode,
    191     dropout=self.hparams.dropout,
    192     activation=self.hparams.activation,
    193     initialization=self.hparams.initialization,
    194     batch_normalization=self.hparams.batch_normalization,
    195     shared_weights=self.hparams.shared_weights,
    196     naive_level=self.hparams.naive_level,
    197 )

File /opt/anaconda3/envs/tfl_training_explainable_ai/lib/python3.9/site-packages/pytorch_forecasting/models/nhits/sub_modules.py:237, in NHiTS.__init__(self, context_length, prediction_length, output_size, static_size, covariate_size, static_hidden_size, n_blocks, n_layers, hidden_size, pooling_sizes, downsample_frequencies, pooling_mode, interpolation_mode, dropout, activation, initialization, batch_normalization, shared_weights, naive_level)
    234 self.output_size = output_size
    235 self.naive_level = naive_level
--> 237 blocks = self.create_stack(
    238     n_blocks=n_blocks,
    239     context_length=context_length,
    240     prediction_length=prediction_length,
    241     output_size=output_size,
    242     covariate_size=covariate_size,
    243     static_size=static_size,
    244     static_hidden_size=static_hidden_size,
    245     n_layers=n_layers,
    246     hidden_size=hidden_size,
    247     pooling_sizes=pooling_sizes,
    248     downsample_frequencies=downsample_frequencies,
    249     pooling_mode=pooling_mode,
    250     interpolation_mode=interpolation_mode,
    251     batch_normalization=batch_normalization,
    252     dropout=dropout,
    253     activation=activation,
    254     shared_weights=shared_weights,
    255     initialization=initialization,
    256 )
    257 self.blocks = torch.nn.ModuleList(blocks)

File /opt/anaconda3/envs/tfl_training_explainable_ai/lib/python3.9/site-packages/pytorch_forecasting/models/nhits/sub_modules.py:302, in NHiTS.create_stack(self, n_blocks, context_length, prediction_length, output_size, covariate_size, static_size, static_hidden_size, n_layers, hidden_size, pooling_sizes, downsample_frequencies, pooling_mode, interpolation_mode, batch_normalization, dropout, activation, shared_weights, initialization)
    295     n_theta = max(prediction_length // downsample_frequencies[i], 1)
    296     basis = IdentityBasis(
    297         backcast_size=context_length,
    298         forecast_size=prediction_length,
    299         interpolation_mode=interpolation_mode,
    300     )
--> 302     nbeats_block = NHiTSBlock(
    303         context_length=context_length,
    304         prediction_length=prediction_length,
    305         output_size=output_size,
    306         covariate_size=covariate_size,
    307         static_size=static_size,
    308         static_hidden_size=static_hidden_size,
    309         n_theta=n_theta,
    310         hidden_size=hidden_size[i],
    311         pooling_sizes=pooling_sizes[i],
    312         pooling_mode=pooling_mode,
    313         basis=basis,
    314         n_layers=n_layers[i],
    315         batch_normalization=batch_normalization_block,
    316         dropout=dropout,
    317         activation=activation,
    318     )
    320 # Select type of evaluation and apply it to all layers of block
    321 init_function = partial(init_weights, initialization=initialization)

File /opt/anaconda3/envs/tfl_training_explainable_ai/lib/python3.9/site-packages/pytorch_forecasting/models/nhits/sub_modules.py:144, in NHiTSBlock.__init__(self, context_length, prediction_length, output_size, covariate_size, static_size, static_hidden_size, n_theta, hidden_size, pooling_sizes, pooling_mode, basis, n_layers, batch_normalization, dropout, activation)
    142 hidden_layers = []
    143 for i in range(n_layers):
--> 144     hidden_layers.append(nn.Linear(in_features=self.hidden_size[i], out_features=self.hidden_size[i + 1]))
    145     hidden_layers.append(activ)
    147     if self.batch_normalization:

IndexError: list index out of range

Code to reproduce the problem

nhits = NHiTS.from_dataset(
    ...,
    n_layers=3
)
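
For reference, here is a fully self-contained reproduction with a synthetic dataset (the dataset below is only a hypothetical stand-in for my training_nhits and is untested beyond triggering the constructor):

import numpy as np
import pandas as pd
from pytorch_forecasting import NHiTS, TimeSeriesDataSet

# hypothetical minimal dataset; it only needs to be valid enough to reach NHiTS.__init__
data = pd.DataFrame(
    {
        "time_idx": np.arange(100),
        "group": "a",
        "value": np.sin(np.arange(100) / 10).astype(np.float32),
    }
)
training_nhits = TimeSeriesDataSet(
    data,
    time_idx="time_idx",
    target="value",
    group_ids=["group"],
    max_encoder_length=24,
    max_prediction_length=12,
    time_varying_unknown_reals=["value"],
)

# a non-default n_layers (here 3, as in the traceback above) raises the IndexError
nhits = NHiTS.from_dataset(training_nhits, n_layers=3)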

Hi,

My name is Faried Abu Zaid. I'm a researcher at the appliedAI institute. We are going to use your library in one of our upcoming courses on explainable AI, and I came across the above-mentioned error during preparation.

I peeked a little into the code, and I think the error is caused by the way the NHiTSModule is instantiated. hidden_size is defined as

hidden_size=self.n_stacks * [2 * [self.hparams.hidden_size]],

I think the hardcoded 2 must be replaced appropriately; a sketch of what we have in mind follows below. We could propose a patch if you like.
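
For illustration, the mismatch can be demonstrated with plain lists. This is an untested sketch; it assumes n_layers is passed as an int (as in the reproduction above) and that NHiTSBlock prepends the input width to hidden_size before building its n_layers linear layers:

n_stacks = 3      # hypothetical values, for illustration only
n_layers = 3      # the non-default value from this report
hidden_size = 128

# current construction in NHiTS.__init__: always two widths per block
current = n_stacks * [2 * [hidden_size]]            # [[128, 128], [128, 128], [128, 128]]

# NHiTSBlock iterates over range(n_layers) and reads hidden_size[i + 1],
# so each block needs one width per layer; a possible replacement:
proposed = n_stacks * [n_layers * [hidden_size]]    # [[128, 128, 128], [128, 128, 128], [128, 128, 128]]

In NHiTS.__init__ this would amount to replacing the hardcoded 2 with self.hparams.n_layers, i.e. hidden_size=self.n_stacks * [self.hparams.n_layers * [self.hparams.hidden_size]].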

BR, Faried Abu Zaid

akosfurton commented 1 year ago

Yes please! I also faced the same issue when trying to set n_layers to 1

fariedabuzaid commented 1 year ago

Hi @akosfurton, I hope I can find some time soon to prepare a PR.

safersephy commented 8 months ago

This is still a problem, sadly.