zalandoresearch / pytorch-ts

PyTorch based Probabilistic Time Series forecasting framework based on GluonTS backend
MIT License
1.21k stars 191 forks source link

cannot import name 'FuncType' from 'pandas._typing' (/opt/conda/lib/python3.7/site-packages/pandas/_typing.py) #75

Open kaleming opened 2 years ago

kaleming commented 2 years ago

I am getting this error when I try to start my training on Kaggle:

def gluon_train(df, num_batches_per_epoch, cardinality):

    estimator = DeepAREstimator(  
                    freq='D', 
                    prediction_length=30,
                    context_length = 15, 
                    input_size=71,
                    trainer=Trainer(epochs=50,
                                    device=device,
                                    batch_size=32,
                                    learning_rate=2e-3, 
                                    patience=15,
                                    num_batches_per_epoch=num_batches_per_epoch),
                    lags_seq=[1, 2],
                    distr_output=ZeroInflatedNegativeBinomialOutput(),
                    use_feat_static_cat=True, 
                    use_feat_dynamic_real=True,
                    cardinality =  cardinality
                              )

    return estimator.train(training_data=df)       

---------------------------------------------------------------------------
ImportError                               Traceback (most recent call last)
<ipython-input-42-41ec9441a8e8> in <module>
      7 #               ]
      8 
----> 9 tr = gluon_train(train_ds, num_batches_per_epoch, cardinality)

<ipython-input-41-7d28eedff523> in gluon_train(df, num_batches_per_epoch, cardinality)
     19                               )
     20 
---> 21     return estimator.train(training_data=df)

/opt/conda/lib/python3.7/site-packages/pts/model/estimator.py in train(self, training_data, validation_data, num_workers, prefetch_factor, shuffle_buffer_length, cache_data, **kwargs)
    184             shuffle_buffer_length=shuffle_buffer_length,
    185             cache_data=cache_data,
--> 186             **kwargs,
    187         ).predictor

/opt/conda/lib/python3.7/site-packages/pts/model/estimator.py in train_model(self, training_data, validation_data, num_workers, prefetch_factor, shuffle_buffer_length, cache_data, **kwargs)
    152             net=trained_net,
    153             train_iter=training_data_loader,
--> 154             validation_iter=validation_data_loader,
    155         )
    156 

/opt/conda/lib/python3.7/site-packages/pts/trainer.py in __call__(self, net, train_iter, validation_iter)
     61             # training loop
     62             with tqdm(train_iter, total=total) as it:
---> 63                 for batch_no, data_entry in enumerate(it, start=1):
     64                     optimizer.zero_grad()
     65 

/opt/conda/lib/python3.7/site-packages/tqdm/notebook.py in __iter__(self, *args, **kwargs)
    216     def __iter__(self, *args, **kwargs):
    217         try:
--> 218             for obj in super(tqdm_notebook, self).__iter__(*args, **kwargs):
    219                 # return super(tqdm...) will not catch exception
    220                 yield obj

/opt/conda/lib/python3.7/site-packages/tqdm/std.py in __iter__(self)
   1125 
   1126         try:
-> 1127             for obj in iterable:
   1128                 yield obj
   1129                 # Update and possibly print the progressbar.

/opt/conda/lib/python3.7/site-packages/torch/utils/data/dataloader.py in __next__(self)
    519             if self._sampler_iter is None:
    520                 self._reset()
--> 521             data = self._next_data()
    522             self._num_yielded += 1
    523             if self._dataset_kind == _DatasetKind.Iterable and \

/opt/conda/lib/python3.7/site-packages/torch/utils/data/dataloader.py in _next_data(self)
    559     def _next_data(self):
    560         index = self._next_index()  # may raise StopIteration
--> 561         data = self._dataset_fetcher.fetch(index)  # may raise StopIteration
    562         if self._pin_memory:
    563             data = _utils.pin_memory.pin_memory(data)

/opt/conda/lib/python3.7/site-packages/torch/utils/data/_utils/fetch.py in fetch(self, possibly_batched_index)
     26             for _ in possibly_batched_index:
     27                 try:
---> 28                     data.append(next(self.dataset_iter))
     29                 except StopIteration:
     30                     break

/opt/conda/lib/python3.7/site-packages/gluonts/transform/_base.py in __iter__(self)
    102     def __iter__(self) -> Iterator[DataEntry]:
    103         yield from self.transformation(
--> 104             self.base_dataset, is_train=self.is_train
    105         )
    106 

/opt/conda/lib/python3.7/site-packages/gluonts/transform/_base.py in __call__(self, data_it, is_train)
    121         self, data_it: Iterable[DataEntry], is_train: bool
    122     ) -> Iterator:
--> 123         for data_entry in data_it:
    124             try:
    125                 yield self.map_transform(data_entry.copy(), is_train)

/opt/conda/lib/python3.7/site-packages/gluonts/transform/_base.py in __call__(self, data_it, is_train)
    174     ) -> Iterator:
    175         num_idle_transforms = 0
--> 176         for data_entry in data_it:
    177             num_idle_transforms += 1
    178             for result in self.flatmap_transform(data_entry.copy(), is_train):

/opt/conda/lib/python3.7/site-packages/gluonts/transform/_base.py in __call__(self, data_it, is_train)
    121         self, data_it: Iterable[DataEntry], is_train: bool
    122     ) -> Iterator:
--> 123         for data_entry in data_it:
    124             try:
    125                 yield self.map_transform(data_entry.copy(), is_train)

/opt/conda/lib/python3.7/site-packages/gluonts/transform/_base.py in __call__(self, data_it, is_train)
    121         self, data_it: Iterable[DataEntry], is_train: bool
    122     ) -> Iterator:
--> 123         for data_entry in data_it:
    124             try:
    125                 yield self.map_transform(data_entry.copy(), is_train)

/opt/conda/lib/python3.7/site-packages/gluonts/transform/_base.py in __call__(self, data_it, is_train)
    125                 yield self.map_transform(data_entry.copy(), is_train)
    126             except Exception as e:
--> 127                 raise e
    128 
    129     @abc.abstractmethod

/opt/conda/lib/python3.7/site-packages/gluonts/transform/_base.py in __call__(self, data_it, is_train)
    123         for data_entry in data_it:
    124             try:
--> 125                 yield self.map_transform(data_entry.copy(), is_train)
    126             except Exception as e:
    127                 raise e

/opt/conda/lib/python3.7/site-packages/gluonts/transform/feature.py in map_transform(self, data, is_train)
    369             data[self.target_field], self.pred_length, is_train=is_train
    370         )
--> 371         self._update_cache(start, length)
    372         i0 = self._date_index[start]
    373         features = (

/opt/conda/lib/python3.7/site-packages/gluonts/transform/feature.py in _update_cache(self, start, length)
    356                 [feat(self.full_date_range) for feat in self.date_features]
    357             ).astype(self.dtype)
--> 358             if self.date_features
    359             else None
    360         )

/opt/conda/lib/python3.7/site-packages/gluonts/transform/feature.py in <listcomp>(.0)
    354         self._full_range_date_features = (
    355             np.vstack(
--> 356                 [feat(self.full_date_range) for feat in self.date_features]
    357             ).astype(self.dtype)
    358             if self.date_features

/opt/conda/lib/python3.7/site-packages/gluonts/time_feature/_base.py in __call__(self, index)
     70 
     71     def __call__(self, index: pd.DatetimeIndex) -> np.ndarray:
---> 72         return index.dayofweek / 6.0 - 0.5
     73 
     74 

/opt/conda/lib/python3.7/site-packages/pandas/core/indexes/base.py in index_arithmetic_method(self, other)
    137 from pandas.core.construction import (
    138     ensure_wrapped_if_datetimelike,
--> 139     extract_array,
    140     sanitize_array,
    141 )

/opt/conda/lib/python3.7/site-packages/pandas/core/ops/common.py in new_method(self, other)
     62                 if isinstance(self, cls):
     63                     break
---> 64                 if isinstance(other, cls):
     65                     return NotImplemented
     66 

/opt/conda/lib/python3.7/site-packages/pandas/core/ops/__init__.py in wrapper(left, right)

/opt/conda/lib/python3.7/site-packages/pandas/core/ops/array_ops.py in arithmetic_op(left, right, op, str_rep)
    195     right : object
    196         Cannot be a DataFrame or Index.  Series is *not* excluded.
--> 197     op : {operator.add, operator.sub, ...}
    198         Or one of the reversed variants from roperator.
    199 

/opt/conda/lib/python3.7/site-packages/pandas/core/ops/array_ops.py in na_arithmetic_op(left, right, op, str_rep)
    144     Parameters
    145     ----------
--> 146     left : np.ndarray
    147     right : np.ndarray or scalar
    148     is_cmp : bool, default False

/opt/conda/lib/python3.7/site-packages/pandas/core/computation/expressions.py in <module>
     15 from pandas._config import get_option
     16 
---> 17 from pandas._typing import FuncType
     18 
     19 from pandas.core.computation.check import NUMEXPR_INSTALLED

ImportError: cannot import name 'FuncType' from 'pandas._typing' (/opt/conda/lib/python3.7/site-packages/pandas/_typing.py)
kashif commented 2 years ago

@kaleming seems from googling that the issue might be an older version of pandas? Can you try to update your pandas?

kashif commented 2 years ago

@kaleming can I close this issue?

vrsivananda commented 2 years ago

@kashif I'm getting the same error just by trying to concatenate a string column with another string, e.g. df['str_col'] + 'a'. I'm using Python 3.7 on AWS SageMaker and the pandas version is 1.3.5. I ran pip install --upgrade pandas, but it says that all requirements are already satisfied. Do you have insight on what might be wrong?