ssuai / using_wandb

Examples to use wandb
0 stars 0 forks source link

Check the list of available parameters with `estimator.get_params().keys()` #1

Open ys7yoo opened 3 months ago

ys7yoo commented 3 months ago
---------------------------------------------------------------------------
ValueError                                Traceback (most recent call last)
Cell In[57], line 72
     70 # Perform GridSearchCV with custom scoring function
     71 grid = GridSearchCV(estimator=model, param_grid=param_grid, scoring=custom_scoring, cv=tscv, verbose=2)
---> 72 grid_result = grid.fit(x_train, y_train)
     74 # Extract and store results
     75 results_dict[f'Dataset_{i}_x{i}_y{i}'] = {
     76     'Best Score': grid_result.best_score_,
     77     'Best Params': grid_result.best_params_,
     78     'MSE': mean_squared_error(y_test, grid_result.predict(x_test))
     79 }

File ~/tf2/lib/python3.10/site-packages/sklearn/base.py:1473, in _fit_context.<locals>.decorator.<locals>.wrapper(estimator, *args, **kwargs)
   1466     estimator._validate_params()
   1468 with config_context(
   1469     skip_parameter_validation=(
   1470         prefer_skip_nested_validation or global_skip_validation
   1471     )
   1472 ):
-> 1473     return fit_method(estimator, *args, **kwargs)

File ~/tf2/lib/python3.10/site-packages/sklearn/model_selection/_search.py:968, in BaseSearchCV.fit(self, X, y, **params)
    962     results = self._format_results(
    963         all_candidate_params, n_splits, all_out, all_more_results
    964     )
    966     return results
--> 968 self._run_search(evaluate_candidates)
    970 # multimetric is determined here because in the case of a callable
    971 # self.scoring the return type is only known after calling
    972 first_test_score = all_out[0]["test_scores"]

File ~/tf2/lib/python3.10/site-packages/sklearn/model_selection/_search.py:1543, in GridSearchCV._run_search(self, evaluate_candidates)
   1541 def _run_search(self, evaluate_candidates):
   1542     """Search all candidates in param_grid"""
-> 1543     evaluate_candidates(ParameterGrid(self.param_grid))

File ~/tf2/lib/python3.10/site-packages/sklearn/model_selection/_search.py:914, in BaseSearchCV.fit.<locals>.evaluate_candidates(candidate_params, cv, more_results)
    906 if self.verbose > 0:
    907     print(
    908         "Fitting {0} folds for each of {1} candidates,"
    909         " totalling {2} fits".format(
    910             n_splits, n_candidates, n_candidates * n_splits
    911         )
    912     )
--> 914 out = parallel(
    915     delayed(_fit_and_score)(
    916         clone(base_estimator),
    917         X,
    918         y,
    919         train=train,
    920         test=test,
    921         parameters=parameters,
    922         split_progress=(split_idx, n_splits),
    923         candidate_progress=(cand_idx, n_candidates),
    924         **fit_and_score_kwargs,
    925     )
    926     for (cand_idx, parameters), (split_idx, (train, test)) in product(
    927         enumerate(candidate_params),
    928         enumerate(cv.split(X, y, **routed_params.splitter.split)),
    929     )
    930 )
    932 if len(out) < 1:
    933     raise ValueError(
    934         "No fits were performed. "
    935         "Was the CV iterator empty? "
    936         "Were there no candidates?"
    937     )

File ~/tf2/lib/python3.10/site-packages/sklearn/utils/parallel.py:67, in Parallel.__call__(self, iterable)
     62 config = get_config()
     63 iterable_with_config = (
     64     (_with_config(delayed_func, config), args, kwargs)
     65     for delayed_func, args, kwargs in iterable
     66 )
---> 67 return super().__call__(iterable_with_config)

File ~/tf2/lib/python3.10/site-packages/joblib/parallel.py:1863, in Parallel.__call__(self, iterable)
   1861     output = self._get_sequential_output(iterable)
   1862     next(output)
-> 1863     return output if self.return_generator else list(output)
   1865 # Let's create an ID that uniquely identifies the current call. If the
   1866 # call is interrupted early and that the same instance is immediately
   1867 # re-used, this id will be used to prevent workers that were
   1868 # concurrently finalizing a task from the previous call to run the
   1869 # callback.
   1870 with self._lock:

File ~/tf2/lib/python3.10/site-packages/joblib/parallel.py:1792, in Parallel._get_sequential_output(self, iterable)
   1790 self.n_dispatched_batches += 1
   1791 self.n_dispatched_tasks += 1
-> 1792 res = func(*args, **kwargs)
   1793 self.n_completed_tasks += 1
   1794 self.print_progress()

File ~/tf2/lib/python3.10/site-packages/sklearn/utils/parallel.py:129, in _FuncWrapper.__call__(self, *args, **kwargs)
    127     config = {}
    128 with config_context(**config):
--> 129     return self.function(*args, **kwargs)

File ~/tf2/lib/python3.10/site-packages/sklearn/model_selection/_validation.py:876, in _fit_and_score(estimator, X, y, scorer, train, test, verbose, parameters, fit_params, score_params, return_train_score, return_parameters, return_n_test_samples, return_times, return_estimator, split_progress, candidate_progress, error_score)
    869 score_params_test = _check_method_params(X, params=score_params, indices=test)
    871 if parameters is not None:
    872     # here we clone the parameters, since sometimes the parameters
    873     # themselves might be estimators, e.g. when we search over different
    874     # estimators in a pipeline.
    875     # ref: https://github.com/scikit-learn/scikit-learn/pull/26786
--> 876     estimator = estimator.set_params(**clone(parameters, safe=False))
    878 start_time = time.time()
    880 X_train, y_train = _safe_split(estimator, X, y, train)

File ~/tf2/lib/python3.10/site-packages/scikeras/wrappers.py:1175, in BaseWrapper.set_params(self, **params)
   1171             super().set_params(**{param: value})
   1172         except ValueError:
   1173             # Give a SciKeras specific user message to aid
   1174             # in moving from the Keras wrappers
-> 1175             raise ValueError(
   1176                 f"Invalid parameter {param} for estimator {self.__name__}."
   1177                 "\nThis issue can likely be resolved by setting this parameter"
   1178                 f" in the {self.__name__} constructor:"
   1179                 f"\n`{self.__name__}({param}={value})`"
   1180                 "\nCheck the list of available parameters with"
   1181                 " `estimator.get_params().keys()`"
   1182             ) from None
   1183 return self

ValueError: Invalid parameter activation for estimator KerasRegressor.
This issue can likely be resolved by setting this parameter in the KerasRegressor constructor:
`KerasRegressor(activation=relu)`
Check the list of available parameters with `estimator.get_params().keys()`
ys7yoo commented 3 months ago

https://stackoverflow.com/questions/70250928/activation-parameter-not-working-in-gridsearch