diff --git a/mypy.ini b/mypy.ini
index 01d002dc2..1709efa63 100644
--- a/mypy.ini
+++ b/mypy.ini
@@ -3,7 +3,7 @@
 [mypy-scipy.*,requests,pandas,compiler_gym,compiler_gym.*,gym,gym.*,gym_anm,matplotlib.*,pytest,cma,bayes_opt.*,torchvision.models,torch.*,mpl_toolkits.*,fcmaes.*,tqdm,pillow,PIL,PIL.Image,sklearn.*,pyomo.*,pyproj,IOHexperimenter.*,tensorflow,koncept.models,cv2,imquality,imquality.brisque,lpips,mixsimulator.*,networkx.*,cdt.*,pymoo,pymoo.*,bayes_optim.*,olympus.*]
 ignore_missing_imports = True

-[mypy-nevergrad.functions.rl.agents,torchvision,torchvision.*,nevergrad.functions.games.*,nevergrad.functions.multiobjective.pyhv,nevergrad.optimization.test_doc,,pymoo,pymoo.*,pybullet,pybullet_envs,pybulletgym,pyvirtualdisplay,aquacrop.*]
+[mypy-nevergrad.functions.rl.agents,torchvision,torchvision.*,nevergrad.functions.games.*,nevergrad.functions.multiobjective.pyhv,nevergrad.optimization.test_doc,pymoo,pymoo.*,pybullet,pybullet_envs,pybulletgym,pyvirtualdisplay,nlopt,aquacrop.*]
 ignore_missing_imports = True
 ignore_errors = True

diff --git a/nevergrad/optimization/recastlib.py b/nevergrad/optimization/recastlib.py
index bf55412df..9c3cc80c5 100644
--- a/nevergrad/optimization/recastlib.py
+++ b/nevergrad/optimization/recastlib.py
@@ -38,13 +38,14 @@ def __init__(
             "Nelder-Mead",
             "COBYLA",
             "SLSQP",
+            "NLOPT",
             "Powell",
         ], f"Unknown method '{method}'"
         self.method = method
         self.random_restart = random_restart
         # The following line rescales to [0, 1] if fully bounded.
-        if method == "CmaFmin2":
+        if method in ("CmaFmin2", "NLOPT"):
             normalizer = p.helpers.Normalizer(self.parametrization)
             if normalizer.fully_bounded:
                 self._normalizer = normalizer

@@ -71,7 +72,40 @@ def _optimization_function(
         while remaining > 0:  # try to restart if budget is not elapsed
             options: tp.Dict[str, tp.Any] = {} if weakself.budget is None else {"maxiter": remaining}
             # options: tp.Dict[str, tp.Any] = {} if self.budget is None else {"maxiter": remaining}
-            if weakself.method == "CmaFmin2":
+            if weakself.method == "NLOPT":
+                # NLOPT, used as in the PCSE simulator notebooks
+                # (https://github.com/ajwdewit/pcse_notebooks).
+                import nlopt
+
+                def nlopt_objective_function(*args):
+                    # nlopt calls the objective as f(x, grad); the candidate is the first argument.
+                    data = np.asarray(args[0])
+                    assert len(data) == weakself.dimension, (
+                        str(data) + " does not have length " + str(weakself.dimension)
+                    )
+                    if weakself._normalizer is not None:
+                        data = weakself._normalizer.backward(np.asarray(data, dtype=np.float32))
+                    return objective_function(data)
+
+                # Sbplx (based on Subplex) is used by default.
+                opt = nlopt.opt(nlopt.LN_SBPLX, weakself.dimension)
+                # Assign the objective function.
+                opt.set_min_objective(nlopt_objective_function)
+                # Set the bounds: the search runs in [0, 1]^dimension and the
+                # normalizer maps candidates back to the parametrization's own bounds.
+                opt.set_lower_bounds(np.zeros(weakself.dimension))
+                opt.set_upper_bounds(np.ones(weakself.dimension))
+                opt.set_maxeval(budget)
+                # Relative tolerance for convergence.
+                opt.set_ftol_rel(1.0e-10)
+
+                # Start the optimization from the center of the domain.
+                firstguess = 0.5 * np.ones(weakself.dimension)
+                best_x = opt.optimize(firstguess)
+                if weakself._normalizer is not None:
+                    best_x = weakself._normalizer.backward(np.asarray(best_x, dtype=np.float32))
+
+            elif weakself.method == "CmaFmin2":
                 import cma  # import inline in order to avoid matplotlib initialization warning

                 def cma_objective_function(data):
@@ -135,6 +169,7 @@ class NonObjectOptimizer(base.ConfiguredOptimizer):
     - SQP (or SLSQP): very powerful e.g. in continuous noisy optimization. It is based on
       approximating the objective function by quadratic models.
     - Powell
+    - NLOPT (https://nlopt.readthedocs.io/en/latest/; uses Sbplx, based on Subplex)
     random_restart: bool
         whether to restart at a random point if the optimizer converged but the budget is not entirely
         spent yet (otherwise, restarts from best point)
@@ -154,6 +189,7 @@ def __init__(self, *, method: str = "Nelder-Mead", random_restart: bool = False)

 NelderMead = NonObjectOptimizer(method="Nelder-Mead").set_name("NelderMead", register=True)
 CmaFmin2 = NonObjectOptimizer(method="CmaFmin2").set_name("CmaFmin2", register=True)
+NLOPT = NonObjectOptimizer(method="NLOPT").set_name("NLOPT", register=True)
 Powell = NonObjectOptimizer(method="Powell").set_name("Powell", register=True)
 RPowell = NonObjectOptimizer(method="Powell", random_restart=True).set_name("RPowell", register=True)
 Cobyla = NonObjectOptimizer(method="COBYLA").set_name("Cobyla", register=True)
diff --git a/nevergrad/optimization/recorded_recommendations.csv b/nevergrad/optimization/recorded_recommendations.csv
index 8b97829dc..577a8e6d7 100644
--- a/nevergrad/optimization/recorded_recommendations.csv
+++ b/nevergrad/optimization/recorded_recommendations.csv
@@ -146,6 +146,7 @@ NGOptBase,0.0,-0.3451057176,-0.1327329683,1.9291307781,,,,,,,,,,,,
 NGOptSingle16,0.0,0.0,0.0,0.0,,,,,,,,,,,,
 NGOptSingle25,0.0,0.0,0.0,0.0,,,,,,,,,,,,
 NGOptSingle9,0.0,0.0,0.0,0.0,,,,,,,,,,,,
+NLOPT,0.625,0.0,0.5,0.5,,,,,,,,,,,,
 NaiveAnisoEMNA,1.012515477,-0.9138691467,-1.0295302074,1.2097964496,,,,,,,,,,,,
 NaiveAnisoEMNATBPSA,0.002380178,-0.0558141,-0.3746306258,1.3332040355,,,,,,,,,,,,
 NaiveIsoEMNA,1.012515477,-0.9138691467,-1.0295302074,1.2097964496,,,,,,,,,,,,
diff --git a/nevergrad/optimization/test_optimizerlib.py b/nevergrad/optimization/test_optimizerlib.py
index 074d9533b..97d6b76e0 100644
--- a/nevergrad/optimization/test_optimizerlib.py
+++ b/nevergrad/optimization/test_optimizerlib.py
@@ -211,7 +211,7 @@ def test_optimizers(name: str) -> None:
         optimizer_cls.__class__(**optimizer_cls._config) == optimizer_cls
     ), "Similar configurations are not equal"
     # some classes of optimizer are either slow or not good with small budgets:
-    nameparts = ["Many", "Chain", "BO", "Discrete"] + ["chain"]  # TODO remove chain when possible
+    nameparts = ["Many", "Chain", "BO", "Discrete", "NLOPT"] + ["chain"]  # TODO remove chain when possible
     is_ngopt = inspect.isclass(optimizer_cls) and issubclass(optimizer_cls, NGOptBase)  # type: ignore
     verify = (
         not optimizer_cls.one_shot
@@ -435,6 +435,7 @@ def test_bo_parametrization_and_parameters() -> None:
     with pytest.warns(None) as record:  # type: ignore
         opt = optlib.ParametrizedBO(gp_parameters={"alpha": 1})(parametrization, budget=10)
     assert not record, record.list  # no warning
+    # parameters
     # make sure underlying BO optimizer gets instantiated correctly
     new_candidate = opt.parametrization.spawn_child(new_value=((True,), {}))
     opt.tell(new_candidate, 0.0)
diff --git a/requirements/bench.txt b/requirements/bench.txt
index 1351533d8..b96f293d6 100644
--- a/requirements/bench.txt
+++ b/requirements/bench.txt
@@ -34,3 +34,4 @@ olymp==0.0.1b0 ; sys_platform == "linux"
 silence_tensorflow  # for olymp
 tensorflow_probability  # for olymp
 bayes-optim==0.2.5.5
+nlopt
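
For reference, a minimal usage sketch of the optimizer registered by this patch, driven through nevergrad's public API. It assumes nlopt is installed; the 4-dimensional array, its bounds, the budget, and the quadratic objective are illustrative only. The bounds matter: the wrapper only engages its normalizer (and hence NLOPT's [0, 1] search box) when the parametrization is fully bounded.

    import numpy as np
    import nevergrad as ng

    # Fully bounded parametrization: every variable needs finite bounds so the
    # wrapper's normalizer can rescale the search space to [0, 1]^dimension.
    param = ng.p.Array(shape=(4,)).set_bounds(lower=-5.0, upper=5.0)
    optimizer = ng.optimizers.registry["NLOPT"](parametrization=param, budget=200)
    recommendation = optimizer.minimize(lambda x: float(np.sum((x - 1.0) ** 2)))
    print(recommendation.value)  # expected to approach [1., 1., 1., 1.]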