dnouri / nolearn

Combines the ease of use of scikit-learn with the power of Theano/Lasagne
http://danielnouri.org/notes/2014/12/17/using-convolutional-neural-nets-to-detect-facial-keypoints-tutorial/
MIT License

TypeError: cost must be a scalar. #71

Closed: aflyax closed this issue 9 years ago

aflyax commented 9 years ago
import numpy as np

from lasagne import layers
from lasagne.nonlinearities import softmax
from lasagne.updates import nesterov_momentum
from nolearn.lasagne import NeuralNet

print(np.shape(X))  # (137, 43)
print(np.shape(y))  # (137,)

num_features = X.shape[1]  # 43 input features

layers_s = [('input', layers.InputLayer),
            ('dense0', layers.DenseLayer),
            ('output', layers.DenseLayer)]

net_s = NeuralNet(layers=layers_s,

                  input_shape=(None, num_features),
                  dense0_num_units=43,
                  output_num_units=1,
                  output_nonlinearity=None,

                  regression=True,

                  update=nesterov_momentum,
                  update_learning_rate=0.01,
                  update_momentum=0.9,

                  eval_size=0.2,
                  verbose=1,
                  max_epochs=100)

net_s.fit(X, y)

I get the following error:

  DenseLayer    (None, 1)   produces 1 outputs
  DenseLayer    (None, 43)  produces 43 outputs
  InputLayer    (None, 43)  produces 43 outputs
---------------------------------------------------------------------------
TypeError                                 Traceback (most recent call last)
<ipython-input-53-44789ae899eb> in <module>()
----> 1 net_s.fit(np.log(X), np.log(y))

/home/alex/anaconda3/lib/python3.4/site-packages/nolearn/lasagne.py in fit(self, X, y)
    148             out, self.loss, self.update,
    149             self.X_tensor_type,
--> 150             self.y_tensor_type,
    151             )
    152         self.train_iter_, self.eval_iter_, self.predict_iter_ = iter_funcs

/home/alex/anaconda3/lib/python3.4/site-packages/nolearn/lasagne.py in _create_iter_funcs(self, output_layer, loss_func, update, input_type, output_type)
    298         all_params = get_all_params(output_layer)
    299         update_params = self._get_params_for('update')
--> 300         updates = update(loss_train, all_params, **update_params)
    301 
    302         train_iter = theano.function(

/home/alex/src/lasagne/lasagne/updates.py in sgd(loss, all_params, learning_rate)
     10 
     11 def sgd(loss, all_params, learning_rate):
---> 12     all_grads = theano.grad(loss, all_params)
     13     updates = []
     14 

/home/alex/anaconda3/lib/python3.4/site-packages/theano/gradient.py in grad(cost, wrt, consider_constant, disconnected_inputs, add_names, known_grads, return_disconnected)
    431 
    432     if cost is not None and cost.ndim != 0:
--> 433         raise TypeError("cost must be a scalar.")
    434 
    435     if isinstance(wrt, set):

TypeError: cost must be a scalar.
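
The check that raises here lives in Theano itself: theano.grad refuses any cost whose ndim is not zero. A minimal standalone sketch (not nolearn's internals) that reproduces the error and shows the usual remedy of aggregating per-example losses into a 0-d cost:

import numpy as np
import theano
import theano.tensor as T

X_sym = T.matrix('X')
y_sym = T.matrix('y')
w = theano.shared(np.zeros((43, 1), dtype=theano.config.floatX), name='w')

# One squared-error value per sample: a vector, so ndim == 1, not a scalar.
per_example_loss = T.sum((T.dot(X_sym, w) - y_sym) ** 2, axis=1)

# theano.grad(per_example_loss, w)               # raises TypeError: cost must be a scalar.
grads = theano.grad(per_example_loss.mean(), w)  # mean() gives a 0-d cost, so grad works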
dnouri commented 9 years ago

See https://github.com/dnouri/kfkd-tutorial/issues/16
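
The linked issue is not quoted here, but in this era the error typically came from pairing an older nolearn with a Lasagne checkout whose refactored objectives return a per-example loss vector instead of an aggregated scalar, so the update rule receives a non-scalar cost. A sketch under that assumption, using the post-refactor standalone Lasagne API with the same layer sizes as the network above (an illustration of the fix, not nolearn's actual code path):

import theano
import theano.tensor as T
import lasagne

X_sym = T.matrix('X')
y_sym = T.matrix('y')

# 43 inputs -> 43 hidden units -> 1 linear output, mirroring net_s above.
l_in = lasagne.layers.InputLayer((None, 43), input_var=X_sym)
l_hidden = lasagne.layers.DenseLayer(l_in, num_units=43)
l_out = lasagne.layers.DenseLayer(l_hidden, num_units=1, nonlinearity=None)

prediction = lasagne.layers.get_output(l_out)
loss = lasagne.objectives.squared_error(prediction, y_sym)  # per-example losses (not scalar)
loss = lasagne.objectives.aggregate(loss, mode='mean')      # scalar cost that theano.grad accepts

params = lasagne.layers.get_all_params(l_out, trainable=True)
updates = lasagne.updates.nesterov_momentum(loss, params,
                                            learning_rate=0.01, momentum=0.9)
train_fn = theano.function([X_sym, y_sym], loss, updates=updates)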

aflyax commented 9 years ago

Thanks. It works now.