jaredleekatzman / DeepSurv

DeepSurv is a deep learning approach to survival analysis.
MIT License
566 stars 166 forks source link

TypeError: Unknown parameter type: <class 'theano.tensor.var.TensorVariable'> #70

Open eleniboukouvala opened 3 years ago

eleniboukouvala commented 3 years ago

I tried to run the example notebook and at first it worked fine, but when I ran it again after some time, I got this error: TypeError: Unknown parameter type: <class 'theano.tensor.var.TensorVariable'>. I didn't change anything in the given code.

typeerror

/usr/local/lib/python3.7/dist-packages/deepsurv/deep_surv.py in train(self, train_data, valid_data, n_epochs, validation_frequency, patience, improvement_threshold, patience_increase, logger, update_fn, verbose, **kwargs) 414 learning_rate=lr, 415 momentum = momentum, --> 416 update_fn = update_fn, **kwargs 417 ) 418

/usr/local/lib/python3.7/dist-packages/deepsurv/deep_surv.py in _get_train_valid_fn(self, L1_reg, L2_reg, learning_rate, **kwargs) 252 loss, updates = self._get_loss_updates( 253 L1_reg, L2_reg, deterministic = False, --> 254 learning_rate=learning_rate, **kwargs 255 ) 256 train_fn = theano.function(

/usr/local/lib/python3.7/dist-packages/deepsurv/deep_surv.py in _get_loss_updates(self, L1_reg, L2_reg, update_fn, max_norm, deterministic, momentum, **kwargs) 199 self._negative_log_likelihood(self.E, deterministic) 200 + regularize_layer_params(self.network,l1) * L1_reg --> 201 + regularize_layer_params(self.network, l2) * L2_reg 202 ) 203

/usr/local/lib/python3.7/dist-packages/deepsurv/deep_surv.py in _negative_log_likelihood(self, E, deterministic) 160 partial Cox likelihood 161 """ --> 162 risk = self.risk(deterministic) 163 hazard_ratio = T.exp(risk) 164 log_risk = T.log(T.extra_ops.cumsum(hazard_ratio))

/usr/local/lib/python3.7/dist-packages/deepsurv/deep_surv.py in risk(self, deterministic) 561 """ 562 return lasagne.layers.get_output(self.network, deterministic = deterministic) --> 563 564 def predict_risk(self, x): 565 """

/usr/local/lib/python3.7/dist-packages/lasagne/layers/helper.py in get_output(layer_or_layers, inputs, **kwargs) 195 "mapping this layer to an input expression." 196 % layer) --> 197 all_outputs[layer] = layer.get_output_for(layer_inputs, **kwargs) 198 try: 199 accepted_kwargs |= set(utils.inspect_kwargs(

/usr/local/lib/python3.7/dist-packages/lasagne/layers/noise.py in get_output_for(self, input, deterministic, **kwargs) 100 for a, s in enumerate(mask_shape)) 101 mask = self._srng.binomial(mask_shape, p=retain_prob, dtype=input.dtype) --> 102 if self.shared_axes: 103 bcast = tuple(bool(s == 1) for s in mask_shape) 104 mask = T.patternbroadcast(mask, bcast)

/usr/local/lib/python3.7/dist-packages/theano/sandbox/rng_mrg.py in binomial(self, size, n, p, ndim, dtype, nstreams, **kwargs) 899 p = undefined_grad(as_tensor_variable(p)) 900 x = self.uniform(size=size, nstreams=nstreams, **kwargs) --> 901 return cast(x < p, dtype) 902 else: 903 raise NotImplementedError("MRG_RandomStreams.binomial with n > 1")

/usr/local/lib/python3.7/dist-packages/theano/sandbox/rng_mrg.py in uniform(self, size, low, high, ndim, dtype, nstreams, **kwargs) 870 nstreams = self.n_streams(size) 871 rstates = self.get_substream_rstates(nstreams, dtype) --> 872 873 d = {} 874 if 'target' in kwargs:

/usr/local/lib/python3.7/dist-packages/theano/configparser.py in res(*args, **kwargs) 115 def res(*args, **kwargs): 116 with self: --> 117 return f(*args, **kwargs) 118 return res 119

/usr/local/lib/python3.7/dist-packages/theano/sandbox/rng_mrg.py in get_substream_rstates(self, n_streams, dtype, inc_rstate) 777 if multMatVect.dot_modulo is None: 778 multMatVect(rval[0], A1p72, M1, A2p72, M2) --> 779 780 # This way of calling the Theano fct is done to bypass Theano overhead. 781 f = multMatVect.dot_modulo

/usr/local/lib/python3.7/dist-packages/theano/sandbox/rng_mrg.py in multMatVect(v, A, m1, B, m2) 60 o = DotModulo()(A_sym, s_sym, m_sym, A2_sym, s2_sym, m2_sym) 61 multMatVect.dot_modulo = function([A_sym, s_sym, m_sym, A2_sym, s2_sym, m2_sym], o, profile=False) ---> 62 63 # This way of calling the Theano fct is done to bypass Theano overhead. 64 f = multMatVect.dot_modulo

/usr/local/lib/python3.7/dist-packages/theano/compile/function/init.py in function(inputs, outputs, mode, updates, givens, no_default_updates, accept_inplace, name, rebuild_strict, allow_input_downcast, profile, on_unused_input) 348 on_unused_input=on_unused_input, 349 profile=profile, --> 350 output_keys=output_keys, 351 ) 352 return fn

/usr/local/lib/python3.7/dist-packages/theano/compile/function/pfunc.py in pfunc(params, outputs, mode, updates, givens, no_default_updates, accept_inplace, name, rebuild_strict, allow_input_downcast, profile, on_unused_input, output_keys) 425 # transform params into theano.compile.In objects. 426 inputs = [ --> 427 _pfunc_param_to_in(p, allow_downcast=allow_input_downcast) for p in params 428 ] 429

/usr/local/lib/python3.7/dist-packages/theano/compile/function/pfunc.py in (.0) 425 # transform params into theano.compile.In objects. 426 inputs = [ --> 427 _pfunc_param_to_in(p, allow_downcast=allow_input_downcast) for p in params 428 ] 429

/usr/local/lib/python3.7/dist-packages/theano/compile/function/pfunc.py in _pfunc_param_to_in(param, strict, allow_downcast) 541 elif isinstance(param, In): 542 return param --> 543 raise TypeError(f"Unknown parameter type: {type(param)}") 544 545

TypeError: Unknown parameter type: <class 'theano.tensor.var.TensorVariable'>