rstudio / keras3

R Interface to Keras
https://keras3.posit.co/

Using Keras and kerastuneR: RuntimeError: Evaluation error: unused arguments #1332

Open pauldhami opened 2 years ago

pauldhami commented 2 years ago

I am trying to adapt the following code for Bayesian optimization to use with my own data:

https://eagerai.github.io/kerastuneR/articles/BayesianOptimisation.html

I have the following:

# With Hyper-Parameter Tuning
build_model <- function(hp) {    

  # Builds an ANN model
  inputs = keras$Input(shape=dim(Xtrain_pca_scores_scaled)[[2]])

  x = inputs

  # Tune the number of layers
  for (i in seq_len(hp$Int("num_layers", 1, 2))) {
    x = layer_dense(units = hp$Int(paste('units_', i, sep = ''), 
                                   min_value = 10,  
                                   max_value = 50, 
                                   step = 10), 
                    activation = "relu")(x)
    x =  layer_dropout(rate = hp$Float(paste('rate_', i, sep  = ''), 
                                       min_value = 0,  
                                       max_value = 0.5, 
                                       step = 0.1))(x)
  }

  outputs =  layer_dense(units = 1)(x)

  model = keras$Model(inputs, outputs)

  model %>%  compile(
    optimizer = "adam",
    loss = "mse",
    metrics = c("mae"))

  return(model)
}

# Sanity check: confirm the model builds with a default HyperParameters object
build_model(HyperParameters())
################################################################################################################################################
# Custom tuner: subclass the kerastuner Tuner class and override run_trial()
MyTuner = PyClass(
  'Tuner',
  inherit = Tuner_class(),
  list(
    run_trial = function(self, trial, train_ds){
      hp = trial$hyperparameters
      model = self$hypermodel$build(trial$hyperparameters)
      optimizer = tf$keras$optimizers$Adam()
      epoch_loss_metric = tf$keras$metrics$Mean()
    }
  )
)
################################################################################################################################################

main = function () {
  tuner = MyTuner(
    oracle=BayesianOptimization(
      objective= 'mse',
      max_trials=50),
    hypermodel=build_model,
    project_name='Bayesian_Optimization')

  stop_early <- callback_early_stopping(monitor = "mae",
                                        patience = 5,
                                        min_delta = 0.25,
                                        mode = "min")

  tuner %>% fit_tuner(x = np_array(Xtrain_pca_scores_scaled),
                      y  = np_array(train_targets),
                      epochs = 50, 
                      callbacks = c(stop_early))

  best_model = tuner %>% get_best_models(1)

}

main()

When calling:

 tuner %>% fit_tuner(x = np_array(Xtrain_pca_scores_scaled),
                      y  = np_array(train_targets),
                      epochs = 50, 
                      callbacks = c(stop_early))

I get the following error:

Error in py_call_impl(callable, dots$args, dots$keywords) : 
  RuntimeError: Evaluation error: unused arguments (x = c(-......))

Apologies if this isn't appropriate here, but I've been stuck on this for a while now. Any help or hints would be greatly appreciated.
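
In case it helps narrow things down, my best guess is that fit_tuner() forwards x, y, epochs, and callbacks on to the overridden run_trial(), and since my run_trial() only declares (self, trial, train_ds), R treats the extra named arguments as unused. Below is a sketch of the signature I have been experimenting with, where the extras are collected via ... (this is only a guess at what the search loop passes in; I have not confirmed it against the kerastuneR docs):

# Purely illustrative sketch: let run_trial() absorb whatever extra
# arguments the search loop forwards (assumed to be x, y, epochs, callbacks)
MyTuner = PyClass(
  'Tuner',
  inherit = Tuner_class(),
  list(
    run_trial = function(self, trial, ...) {
      fit_args = list(...)   # assumed to hold x, y, epochs, callbacks
      hp = trial$hyperparameters
      model = self$hypermodel$build(hp)
      optimizer = tf$keras$optimizers$Adam()
      epoch_loss_metric = tf$keras$metrics$Mean()
    }
  )
)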

t-kalinowski commented 2 years ago

Hi, thanks for filing. Can you please provide a reprex? When I try to run the code I get:

Error in py_resolve_dots(list(...)) : 
object 'Xtrain_pca_scores_scaled' not found
pauldhami commented 2 years ago

Greetings,

Apologies for that. Below is a reproducible example:

library(keras)
library(tensorflow)
library(reticulate)   # np_array(), PyClass()
library(kerastuneR)   # HyperParameters(), Tuner_class(), BayesianOptimization(), fit_tuner()

#################################################################################################################################
Xdata <- matrix(rnorm(1000), nrow = 100)  # 100 x 10 predictor matrix
ydata <- matrix(rnorm(100), nrow = 100)   # 100 x 1 target
#################################################################################################################################

# With Hyper-Parameter Tuning
build_model <- function(hp) {    

  # Builds an ANN model
  inputs = keras$Input(shape=dim(Xdata)[[2]])

  x = inputs

  # Tune the number of layers
  for (i in seq_len(hp$Int("num_layers", 1, 2))) {
    x = layer_dense(units = hp$Int(paste('units_', i, sep = ''), 
                                   min_value = 10,  
                                   max_value = 50, 
                                   step = 10), 
                    activation = "relu")(x)
    x =  layer_dropout(rate = hp$Float(paste('rate_', i, sep  = ''), 
                                       min_value = 0,  
                                       max_value = 0.5, 
                                       step = 0.1))(x)
  }

  outputs =  layer_dense(units = 1)(x)

  model = keras$Model(inputs, outputs)

  model %>%  compile(
    optimizer = "adam",
    loss = "mse",
    metrics = c("mae"))

  return(model)
}

# Sanity check: confirm the model builds with a default HyperParameters object
build_model(HyperParameters())
################################################################################################################################################
# Custom tuner: subclass the kerastuner Tuner class and override run_trial()
MyTuner = PyClass(
  'Tuner',
  inherit = Tuner_class(),
  list(
    run_trial = function(self, trial, train_ds){
      hp = trial$hyperparameters
      model = self$hypermodel$build(trial$hyperparameters)
      optimizer = tf$keras$optimizers$Adam()
      epoch_loss_metric = tf$keras$metrics$Mean()
    }
  )
)
################################################################################################################################################

main = function () {
  tuner = MyTuner(
    oracle=BayesianOptimization(
      objective= "val_mean_absolute_error",
      max_trials=50),
    hypermodel=build_model,
    project_name='Bayesian_Optimization')

  stop_early <- callback_early_stopping(monitor = "mae",
                                        patience = 5,
                                        min_delta = 0.25,
                                        mode = "min")

  tuner %>% fit_tuner(np_array(Xdata),
                      np_array(ydata),
                      epochs = 50, 
                      callbacks = c(stop_early))

  best_model = tuner %>% get_best_models(1)

}
#################################################################################################################################
main()