autonomio / talos

Hyperparameter Experiments with TensorFlow and Keras
https://autonom.io
MIT License

type error first neuron #334

Closed elizabellatran closed 5 years ago

elizabellatran commented 5 years ago

I saw on the troubleshooting page that when the KeyError is "first_neuron", the parameter for the first layer needs to be called "first_neuron".

But I do currently have the parameter for my first layer named "first_neuron". Is there something I'm missing? If anyone can see an error and point it out, I would really appreciate it!
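
For reference, the shape Talos expects looks roughly like this: every value in `params` is a list, tuple, or range of candidate values, and the first layer reads `params['first_neuron']`. A minimal sketch (the key values, layer sizes, loss, and the `minimal_model` name are illustrative, not from this issue):

```
import talos as ta
from keras.models import Sequential
from keras.layers import Dense

p = {
    'first_neuron': [8, 16, 32],   # must be spelled exactly 'first_neuron'
    'batch_size': [128],
    'epochs': [50],
}

def minimal_model(x_train, y_train, x_val, y_val, params):
    model = Sequential()
    model.add(Dense(params['first_neuron'], input_dim=x_train.shape[1], activation='relu'))
    model.add(Dense(1))
    model.compile(optimizer='adam', loss='mse')
    out = model.fit(x_train, y_train,
                    batch_size=params['batch_size'],
                    epochs=params['epochs'],
                    validation_data=(x_val, y_val),
                    verbose=0)
    # Talos expects the fit history and the model returned in this order
    return out, model

# ta.Scan(x, y, params=p, model=minimal_model)
```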

Error message, parameters, and model:

```
<ipython-input-162-fa3c1c7694dd> in <module>
      1 h = ta.Scan(x, y, params = params,
----> 2        model= bayes_opt_model)

~/.local/lib/python3.6/site-packages/talos/scan/Scan.py in __init__(self, x, y, params, model, experiment_name, x_val, y_val, val_split, random_method, performance_target, fraction_limit, round_limit, time_limit, boolean_limit, reduction_method, reduction_interval, reduction_window, reduction_threshold, reduction_metric, minimize_loss, seed, clear_session, disable_progress_bar, print_params, debug)
    170         # input parameters section ends
    171 
--> 172         self.runtime()
    173 
    174     def runtime(self):

~/.local/lib/python3.6/site-packages/talos/scan/Scan.py in runtime(self)
    175 
    176         from .scan_run import scan_run
--> 177         self = scan_run(self)

~/.local/lib/python3.6/site-packages/talos/scan/scan_run.py in scan_run(self)
      7 
      8     from .scan_prepare import scan_prepare
----> 9     self = scan_prepare(self)
     10 
     11     # initiate the progress bar

~/.local/lib/python3.6/site-packages/talos/scan/scan_prepare.py in scan_prepare(self)
     30                                    round_limit=self.round_limit,
     31                                    time_limit=self.time_limit,
---> 32                                    boolean_limit=self.boolean_limit
     33                                    )
     34 

~/.local/lib/python3.6/site-packages/talos/parameters/ParamSpace.py in __init__(self, params, param_keys, random_method, fraction_limit, round_limit, time_limit, boolean_limit)
     33 
     34         # create list of list from the params dictionary
---> 35         self._params_temp = [list(self.p[key]) for key in self.param_keys]
     36 
     37         # establish max dimensions

~/.local/lib/python3.6/site-packages/talos/parameters/ParamSpace.py in <listcomp>(.0)
     33 
     34         # create list of list from the params dictionary
---> 35         self._params_temp = [list(self.p[key]) for key in self.param_keys]
     36 
     37         # establish max dimensions
KeyError: 'first_neuron'
```

```
params = {
    "activation_type": ["linear", "elu", "LeakyReLU", "ReLU", "sigmoid", "tanh"],
    "hidden_layers": random.choice(range(1, 6, 1)),
    "first_neuron": random.choice(range(1, 11, 1)),
    "hidden_neurons": random.choice(range(1, 11, 1)),
    "is_batch_norm": [True],
    "max_epochs": [50, 100, 250, 500, 1000, 2500, 5000, 10000, 25000],
    "mini_batch_size": [128],
    "optimizer_function": [Adam],
    "learning_rate": [np.array(round(np.random.uniform(0.00001, 0.1), 10))]
}
```
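
One thing worth noting about the dictionary above: `random.choice(...)` collapses the range to a single int at the moment the dictionary is built, so keys like `"hidden_layers"` and `"first_neuron"` hold one value instead of a set of candidates. Talos does the sampling itself, so each value should be a list, tuple, or range; calling `list()` on a bare int (the line shown in the traceback) raises a TypeError, which may be the "type error" in the issue title. A quick illustration:

```
import random

picked = random.choice(range(1, 11))
print(type(picked))              # <class 'int'> - one draw, not a search space
# list(picked) would raise: TypeError: 'int' object is not iterable

candidates = list(range(1, 11))  # what Talos wants: the candidates themselves
print(candidates)                # [1, 2, ..., 10]
```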

```
def bayes_opt_model(inputs, outputs, params, batchnorm=False, winit='xavier'):
    activation_type = random.choice(params['activation_type'])
    hidden_layer_number = random.choice(params['hidden_layers'])

    def weight_initi(winit):
        if winit == 'xavier':
            if activation_type == 'sigmoid' or activation_type == 'linear':
                factor = 1  # Xavier weight initialization factor
            elif activation_type == 'relu' or activation_type == 'LeakyReLU':
                factor = 2
            elif activation_type == 'tanh':
                factor = 16
            else:
                factor = 1  # default so the function never returns an unbound value
            return factor

    factor = weight_initi(winit)

    def initializer_pick(curr_winit):
        if curr_winit == 'xavier':
            initialization = initializers.glorot_uniform(factor)
        else:
            # every non-xavier branch currently picks He initialization
            if activation_type == 'LeakyReLU':
                initialization = initializers.he_normal(factor)
            elif activation_type == 'relu' or activation_type == 'elu':
                initialization = initializers.he_normal(factor)
            else:
                initialization = initializers.he_normal(factor)
        return initialization

    initialization = initializer_pick(winit)
    ###############
    # Begin model #
    ###############
    model = Sequential()
    model.add(Dense(params['first_neuron'], input_dim=inputs.shape[1], activation=activation_type, kernel_initializer=initialization))
    if batchnorm == True:
        model.add(BatchNormalization())

    ############################
    # Remaining hidden layers  #
    ############################
    for i in range(hidden_layer_number - 1):
        model.add(Dense(units=params['hidden_neurons'], activation=activation_type, kernel_initializer=initialization))
        if batchnorm == True:
            model.add(BatchNormalization())

    model.add(Dense(units=params['hidden_neurons'], activation=activation_type, kernel_initializer=initialization))
    model.compile(optimizer=params['optimizer'](lr_normalizer(params['learning_rate'], params['optimizer_function'])), loss=params['losses'])

    #################
    # Model summary #
    #################
    print(model.summary())
    out = model.fit(inputs, outputs, batch_size=params['batch_size'], epochs=params['epochs'], verbose=1)

    # return the fit history and the model, as Talos expects
    return out, model
```
elizabellatran commented 5 years ago

I realized that I had some spelling mistakes (a few parameter names in the dictionary didn't match the names the model looks up) and that the parameter values can take a range.

But it works now!
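
For anyone who lands on this traceback later: based on the resolution above, the fix is twofold. First, pass the candidates themselves (lists, tuples, or ranges) as dictionary values. Second, use the same key names in the dictionary that the model looks up: the model above reads `params['optimizer']`, `params['optimizer_function']`, `params['losses']`, `params['batch_size']`, and `params['epochs']`, while the dictionary defined `optimizer_function`, `max_epochs`, and `mini_batch_size`, and no `losses` key at all. A sketch of the direction (not the author's actual final code):

```
from keras.optimizers import Adam

params = {
    'activation_type': ['linear', 'elu', 'sigmoid', 'tanh'],
    'hidden_layers': list(range(1, 6)),   # candidates, not random.choice(...)
    'first_neuron': list(range(1, 11)),
    'hidden_neurons': list(range(1, 11)),
    'is_batch_norm': [True],
    'epochs': [50, 100, 250],             # name matches params['epochs'] in the model
    'batch_size': [128],                  # matches params['batch_size']
    'optimizer': [Adam],                  # matches params['optimizer']
    'optimizer_function': [Adam],         # matches the lr_normalizer lookup
    'losses': ['mse'],                    # matches params['losses']
    'learning_rate': [0.001, 0.01, 0.1],
}
```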