autonlab / auton-survival

Auton Survival - an open source package for Regression, Counterfactual Estimation, Evaluation and Phenotyping with Censored Time-to-Events
http://autonlab.github.io/auton-survival
MIT License

ValueError: optimizer got an empty parameter list #96

Open yihahn opened 1 year ago

yihahn commented 1 year ago
from sklearn.model_selection import ParameterGrid
from auton_survival.models.dsm import DeepRecurrentSurvivalMachines

param_grid = {'k' : [3, 4, 6],
              'distribution' : ['LogNormal', 'Weibull'],
              'learning_rate' : [1e-4, 1e-3],
              'batch_size': [64, 128],
              'hidden': [50, 100],
              'layers': [3, 2, 1],
              'typ': ['LSTM', 'GRU', 'RNN'],
              'optim': ['Adam', 'SGD'],
             }
params = ParameterGrid(param_grid)

models = []
for param in params:
    model = DeepRecurrentSurvivalMachines(k = param['k'],
                                          distribution = param['distribution'],
                                          hidden = param['hidden'], 
                                          typ = param['typ'],
                                          layers = param['layers'])
    # The fit method is called to train the model
    model.fit(x_train, t_train, e_train, iters = 1, learning_rate=param['learning_rate'], 
             batch_size=param['batch_size'], optimizer=param['optim'])
    models.append([[model.compute_nll(x_valid, t_valid, e_valid), model]])

best_model = min(models, key=lambda m: m[0][0])  # lowest validation NLL
model = best_model[0][1]

As soon as I ran the above script, I got the error below. What should I do to solve this problem?

---------------------------------------------------------------------------
ValueError                                Traceback (most recent call last)
<ipython-input-...> in <module>
      8     # The fit method is called to train the model
      9     model.fit(x_train, t_train, e_train, iters = 1, learning_rate=param['learning_rate'], 
---> 10              batch_size=param['batch_size'], optimizer=param['optim'])
     11     models.append([[model.compute_nll(x_valid, t_valid, e_valid), model]])
     12 

~/data/nas125/hepa/codes/auton_survival/models/dsm/__init__.py in fit(self, x, t, e, vsize, val_data, iters, learning_rate, batch_size, elbo, optimizer)
    265                          elbo=elbo,
    266                          bs=batch_size,
--> 267                          random_seed=self.random_seed)
    268 
    269     self.torch_model = model.eval()

~/data/nas125/hepa/codes/auton_survival/models/dsm/utilities.py in train_dsm(model, x_train, t_train, e_train, x_valid, t_valid, e_valid, n_iter, lr, elbo, bs, random_seed)
    137                           n_iter=10000,
    138                           lr=1e-2,
--> 139                           thres=1e-4)
    140 
    141   for r in range(model.risks):

~/data/nas125/hepa/codes/auton_survival/models/dsm/utilities.py in pretrain_dsm(model, t_train, e_train, t_valid, e_valid, n_iter, lr, thres)
     61   premodel.double()
     62 
---> 63   optimizer = get_optimizer(premodel, lr)
     64 
     65   oldcost = float('inf')

~/data/nas125/hepa/codes/auton_survival/models/dsm/utilities.py in get_optimizer(model, lr)
     43 
     44   if model.optimizer == 'Adam':
---> 45     return torch.optim.Adam(model.parameters(), lr=lr)
     46   elif model.optimizer == 'SGD':
     47     return torch.optim.SGD(model.parameters(), lr=lr)

~/anaconda3/envs/ml/lib/python3.7/site-packages/torch/optim/adam.py in __init__(self, params, lr, betas, eps, weight_decay, amsgrad)
     40         defaults = dict(lr=lr, betas=betas, eps=eps,
     41                         weight_decay=weight_decay, amsgrad=amsgrad)
---> 42         super(Adam, self).__init__(params, defaults)
     43 
     44     def __setstate__(self, state):

~/anaconda3/envs/ml/lib/python3.7/site-packages/torch/optim/optimizer.py in __init__(self, params, defaults)
     44         param_groups = list(params)
     45         if len(param_groups) == 0:
---> 46             raise ValueError("optimizer got an empty parameter list")
     47         if not isinstance(param_groups[0], dict):
     48             param_groups = [{'params': param_groups}]

ValueError: optimizer got an empty parameter list
chiragnagpal commented 1 year ago

Interesting. Can you try removing the 'optim' hyperparameter from the grid and rerunning it? It should default to using Adam.

yihahn commented 1 year ago

Thank you for the rapid reply. I removed the 'optim' and 'batch_size' hyperparameters and ran the code again, but I still got the same error. Are there any other ways to fix this? By the way, the installed torch and torchvision versions are listed below.

pip list | grep torch
torch                         1.4.0
torchvision                   0.5.0
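
Since torch 1.4.0 is fairly old, one thing I can still try (assuming the error comes from a torch version mismatch rather than from the package itself, which this thread does not confirm) is upgrading torch in the same environment and rerunning:

pip install --upgrade torch torchvision
pip list | grep torch

Here is the rerun with the reduced grid: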
param_grid = {'k' : [3, 4, 6],
              'distribution' : ['LogNormal', 'Weibull'],
              'learning_rate' : [1e-4, 1e-3],
              'hidden': [50, 100],
              'layers': [3, 2, 1],
              'typ': ['LSTM', 'GRU', 'RNN']
             }
params = ParameterGrid(param_grid)

models = []
for param in params:
    model = DeepRecurrentSurvivalMachines(k = param['k'],
                                          distribution = param['distribution'],
                                          hidden = param['hidden'], 
                                          typ = param['typ'],
                                          layers = param['layers'])
    # The fit method is called to train the model
    model.fit(x_train, t_train, e_train, iters = 1, learning_rate=param['learning_rate'])
    models.append([[model.compute_nll(x_valid, t_valid, e_valid), model]])

best_model = min(models, key=lambda m: m[0][0])  # lowest validation NLL
model = best_model[0][1]
---------------------------------------------------------------------------
ValueError                                Traceback (most recent call last)
<ipython-input-17-4e3a1c40b7ae> in <module>
      7                                           layers = param['layers'])
      8     # The fit method is called to train the model
----> 9     model.fit(x_train, t_train, e_train, iters = 1, learning_rate=param['learning_rate'])
     10     models.append([[model.compute_nll(x_valid, t_valid, e_valid), model]])
     11 

~/data/nas125/hepa/codes/auton_survival/models/dsm/__init__.py in fit(self, x, t, e, vsize, val_data, iters, learning_rate, batch_size, elbo, optimizer)
    265                          elbo=elbo,
    266                          bs=batch_size,
--> 267                          random_seed=self.random_seed)
    268 
    269     self.torch_model = model.eval()

~/data/nas125/hepa/codes/auton_survival/models/dsm/utilities.py in train_dsm(model, x_train, t_train, e_train, x_valid, t_valid, e_valid, n_iter, lr, elbo, bs, random_seed)
    137                           n_iter=10000,
    138                           lr=1e-2,
--> 139                           thres=1e-4)
    140 
    141   for r in range(model.risks):

~/data/nas125/hepa/codes/auton_survival/models/dsm/utilities.py in pretrain_dsm(model, t_train, e_train, t_valid, e_valid, n_iter, lr, thres)
     61   premodel.double()
     62 
---> 63   optimizer = get_optimizer(premodel, lr)
     64 
     65   oldcost = float('inf')

~/data/nas125/hepa/codes/auton_survival/models/dsm/utilities.py in get_optimizer(model, lr)
     43 
     44   if model.optimizer == 'Adam':
---> 45     return torch.optim.Adam(model.parameters(), lr=lr)
     46   elif model.optimizer == 'SGD':
     47     return torch.optim.SGD(model.parameters(), lr=lr)

~/anaconda3/envs/ml/lib/python3.7/site-packages/torch/optim/adam.py in __init__(self, params, lr, betas, eps, weight_decay, amsgrad)
     40         defaults = dict(lr=lr, betas=betas, eps=eps,
     41                         weight_decay=weight_decay, amsgrad=amsgrad)
---> 42         super(Adam, self).__init__(params, defaults)
     43 
     44     def __setstate__(self, state):

~/anaconda3/envs/ml/lib/python3.7/site-packages/torch/optim/optimizer.py in __init__(self, params, defaults)
     44         param_groups = list(params)
     45         if len(param_groups) == 0:
---> 46             raise ValueError("optimizer got an empty parameter list")
     47         if not isinstance(param_groups[0], dict):
     48             param_groups = [{'params': param_groups}]

ValueError: optimizer got an empty parameter list
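
Since both runs fail inside pretrain_dsm, before any recurrent layers are involved, one way to narrow this down is to build the small pretraining model in isolation and check whether it exposes any parameters at all. Below is a minimal diagnostic sketch, assuming DeepSurvivalMachinesTorch in dsm_torch.py accepts inputdim and k positionally plus dist, optimizer, and risks keywords, as the call chain in the traceback suggests; if the printed count is zero, the installed torch version is not registering the model's shape and scale parameters, which would produce exactly this error.

# Diagnostic sketch, not an official reproduction recipe: isolate the
# pretraining model that pretrain_dsm builds and inspect its parameters.
import torch
from auton_survival.models.dsm.dsm_torch import DeepSurvivalMachinesTorch

premodel = DeepSurvivalMachinesTorch(1, 1, dist='Weibull',
                                     optimizer='Adam', risks=1)
premodel.double()

params = list(premodel.parameters())
print('torch', torch.__version__, '->', len(params), 'registered parameters')
# An empty list here reproduces the failure seen in get_optimizer(); a
# non-empty list would point the search back at the recurrent model instead.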