facebookresearch / nevergrad

A Python toolbox for performing gradient-free optimization
https://facebookresearch.github.io/nevergrad/
MIT License

BO does not work #878

Closed: valentinosantucci closed this 3 years ago

valentinosantucci commented 3 years ago

Steps to reproduce

  1. Run this code:

```python
import nevergrad as ng
import numpy as np

p = ng.p.Array(shape=(2,))
lb, ub = np.ones(2) - 5, np.ones(2) * 5
p.set_bounds(lb, ub)
o = ng.optimizers.BO(parametrization=p, budget=100)

def square(x):
    return np.sum(x**2)

r = o.minimize(square)
```

Observed Results


```
StopIteration                             Traceback (most recent call last)
~\anaconda3\lib\site-packages\nevergrad\optimization\optimizerlib.py in _internal_ask_candidate(self)
   1456         try:
-> 1457             x_probe = next(self.bo._queue)
   1458         except StopIteration:

~\anaconda3\lib\site-packages\bayes_opt\bayesian_optimization.py in __next__(self)
     24         if self.empty:
---> 25             raise StopIteration("Queue is empty, no more objects to retrieve.")
     26         obj = self._queue[0]

StopIteration: Queue is empty, no more objects to retrieve.

During handling of the above exception, another exception occurred:

ValueError                                Traceback (most recent call last)
<ipython-input> in <module>
----> 1 r = o.minimize(square)

~\anaconda3\lib\site-packages\nevergrad\optimization\base.py in minimize(self, objective_function, executor, batch_mode, verbosity)
    572                     print(f"Launching {new_sugg} jobs with new suggestions")
    573                 for _ in range(new_sugg):
--> 574                     args = self.ask()
    575                     self._running_jobs.append((args, executor.submit(func, *args.args, **args.kwargs)))
    576                 if new_sugg:

~\anaconda3\lib\site-packages\nevergrad\optimization\base.py in ask(self)
    414             candidate = self._suggestions.pop()
    415         else:
--> 416             candidate = self._internal_ask_candidate()
    417         # only register actual asked points
    418         if candidate.satisfies_constraints():

~\anaconda3\lib\site-packages\nevergrad\optimization\optimizerlib.py in _internal_ask_candidate(self)
   1457             x_probe = next(self.bo._queue)
   1458         except StopIteration:
-> 1459             x_probe = self.bo.suggest(util)  # this is time consuming
   1460             x_probe = [x_probe[self._fake_function.key(i)] for i in range(len(x_probe))]
   1461         data = self._transform.backward(np.array(x_probe, copy=False))

~\anaconda3\lib\site-packages\bayes_opt\bayesian_optimization.py in suggest(self, utility_function)
    126         with warnings.catch_warnings():
    127             warnings.simplefilter("ignore")
--> 128             self._gp.fit(self._space.params, self._space.target)
    129
    130         # Finding argmax of the acquisition function.

~\anaconda3\lib\site-packages\sklearn\gaussian_process\_gpr.py in fit(self, X, y)
    230
    231             # First optimize starting from theta specified in kernel
--> 232             optima = [(self._constrained_optimization(obj_func,
    233                                                       self.kernel_.theta,
    234                                                       self.kernel_.bounds))]

~\anaconda3\lib\site-packages\sklearn\gaussian_process\_gpr.py in _constrained_optimization(self, obj_func, initial_theta, bounds)
    499     def _constrained_optimization(self, obj_func, initial_theta, bounds):
    500         if self.optimizer == "fmin_l_bfgs_b":
--> 501             opt_res = scipy.optimize.minimize(
    502                 obj_func, initial_theta, method="L-BFGS-B", jac=True,
    503                 bounds=bounds)

~\anaconda3\lib\site-packages\scipy\optimize\_minimize.py in minimize(fun, x0, args, method, jac, hess, hessp, bounds, constraints, tol, callback, options)
    615                                 **options)
    616     elif meth == 'l-bfgs-b':
--> 617         return _minimize_lbfgsb(fun, x0, args, jac, bounds,
    618                                 callback=callback, **options)
    619     elif meth == 'tnc':

~\anaconda3\lib\site-packages\scipy\optimize\lbfgsb.py in _minimize_lbfgsb(fun, x0, args, jac, bounds, disp, maxcor, ftol, gtol, eps, maxfun, maxiter, iprint, callback, maxls, finite_diff_rel_step, **unknown_options)
    304         iprint = disp
    305
--> 306     sf = _prepare_scalar_function(fun, x0, jac=jac, args=args, epsilon=eps,
    307                                   bounds=new_bounds,
    308                                   finite_diff_rel_step=finite_diff_rel_step)

~\anaconda3\lib\site-packages\scipy\optimize\optimize.py in _prepare_scalar_function(fun, x0, jac, args, bounds, epsilon, finite_diff_rel_step, hess)
    259     # ScalarFunction caches. Reuse of fun(x) during grad
    260     # calculation reduces overall function evaluations.
--> 261     sf = ScalarFunction(fun, x0, args, grad, hess,
    262                         finite_diff_rel_step, bounds, epsilon=epsilon)
    263

~\anaconda3\lib\site-packages\scipy\optimize\_differentiable_functions.py in __init__(self, fun, x0, args, grad, hess, finite_diff_rel_step, finite_diff_bounds, epsilon)
     74
     75         self._update_fun_impl = update_fun
---> 76         self._update_fun()
     77
     78         # Gradient evaluation

~\anaconda3\lib\site-packages\scipy\optimize\_differentiable_functions.py in _update_fun(self)
    164     def _update_fun(self):
    165         if not self.f_updated:
--> 166             self._update_fun_impl()
    167             self.f_updated = True
    168

~\anaconda3\lib\site-packages\scipy\optimize\_differentiable_functions.py in update_fun()
     71
     72         def update_fun():
---> 73             self.f = fun_wrapped(self.x)
     74
     75         self._update_fun_impl = update_fun

~\anaconda3\lib\site-packages\scipy\optimize\_differentiable_functions.py in fun_wrapped(x)
     68         def fun_wrapped(x):
     69             self.nfev += 1
---> 70             return fun(x, *args)
     71
     72         def update_fun():

~\anaconda3\lib\site-packages\scipy\optimize\optimize.py in __call__(self, x, *args)
     72     def __call__(self, x, *args):
     73         """ returns the the function value """
---> 74         self._compute_if_needed(x, *args)
     75         return self._value
     76

~\anaconda3\lib\site-packages\scipy\optimize\optimize.py in _compute_if_needed(self, x, *args)
     66         if not np.all(x == self.x) or self._value is None or self.jac is None:
     67             self.x = np.asarray(x).copy()
---> 68             fg = self.fun(x, *args)
     69             self.jac = fg[1]
     70             self._value = fg[0]

~\anaconda3\lib\site-packages\sklearn\gaussian_process\_gpr.py in obj_func(theta, eval_gradient)
    222         def obj_func(theta, eval_gradient=True):
    223             if eval_gradient:
--> 224                 lml, grad = self.log_marginal_likelihood(
    225                     theta, eval_gradient=True, clone_kernel=False)
    226                 return -lml, -grad

~\anaconda3\lib\site-packages\sklearn\gaussian_process\_gpr.py in log_marginal_likelihood(self, theta, eval_gradient, clone_kernel)
    474             y_train = y_train[:, np.newaxis]
    475
--> 476         alpha = cho_solve((L, True), y_train)  # Line 3
    477
    478         # Compute log-likelihood (compare line 7)

~\anaconda3\lib\site-packages\scipy\linalg\decomp_cholesky.py in cho_solve(c_and_lower, b, overwrite_b, check_finite)
    192     (c, lower) = c_and_lower
    193     if check_finite:
--> 194         b1 = asarray_chkfinite(b)
    195         c = asarray_chkfinite(c)
    196     else:

~\anaconda3\lib\site-packages\numpy\lib\function_base.py in asarray_chkfinite(a, dtype, order)
    483     a = asarray(a, dtype=dtype, order=order)
    484     if a.dtype.char in typecodes['AllFloat'] and not np.isfinite(a).all():
--> 485         raise ValueError(
    486             "array must not contain infs or NaNs")
    487     return a

ValueError: array must not contain infs or NaNs
```
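For context on the last frame: the `ValueError` is raised by scipy's input validation in `cho_solve`, which rejects non-finite arrays before solving. A minimal standalone sketch of just that scipy behavior (not nevergrad code, only an illustration of the check at the bottom of the traceback):

```python
import numpy as np
from scipy.linalg import cho_factor, cho_solve

# Factor a well-conditioned matrix; the factorization itself is fine.
c = cho_factor(np.eye(2))

# A right-hand side containing NaN trips the same finiteness check
# seen at the bottom of the traceback above.
cho_solve(c, np.array([1.0, np.nan]))
# ValueError: array must not contain infs or NaNs
```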
jrapin commented 3 years ago

Hi, I could not repro. Which versions of nevergrad and bayes_opt are you using? Can you reproduce it on master?

[Screenshot attached: Screen Shot 2020-10-26 at 1.22.48 PM]
valentinosantucci commented 3 years ago

Hello, thanks for the reply. I am using nevergrad 0.4.2.post1 and bayes_opt 1.2.0, both running on Windows with the latest Anaconda 3 and all modules updated.
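In case it helps, one way to print the exact versions in play (assuming Python 3.8+, where `importlib.metadata` is available; the package list is just the stack visible in the traceback):

```python
from importlib.metadata import version

# bayes_opt is distributed on PyPI as "bayesian-optimization".
for pkg in ("nevergrad", "bayesian-optimization", "scikit-learn", "scipy"):
    print(pkg, version(pkg))
```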

jrapin commented 3 years ago

Does it happen all the time? I won't be able to do much if I can't reproduce it :s

fsmosca commented 3 years ago

@goldengod There are recent bug fixes to BO that are not in 0.4.2.post1. As jrapin said, try the master repo.

@jrapin Perhaps make a new release, as there are bug fixes since 0.4.2.post1.

valentinosantucci commented 3 years ago

I just updated to the very recent 0.4.2.post2 and everything works correctly. Thank you!
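For anyone landing here later: after upgrading (for example with `pip install -U nevergrad`), the snippet from the report should run to completion. A compact re-check under the same setup as the original repro:

```python
import numpy as np
import nevergrad as ng

p = ng.p.Array(shape=(2,))
p.set_bounds(np.ones(2) - 5, np.ones(2) * 5)
o = ng.optimizers.BO(parametrization=p, budget=100)
r = o.minimize(lambda x: float(np.sum(x ** 2)))
print(r.value)  # expected to land near the optimum [0, 0]
```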

jrapin commented 3 years ago

I thought all the fixes had been included, but indeed some were missing; thanks @fsmosca. Closing this then, please open another issue if you have new problems ;)