Commit
distinguish scipy optimizer without restart and with restart (#1575)
* nor

* nor

* no_gomea

* fixdetails

* black

* switch_to_101

* lessflaky

* compare_new

* compare_new
teytaud authored Nov 18, 2023
1 parent 8442810 commit 48bf30f
Showing 10 changed files with 57 additions and 52 deletions.
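The change splits the previous scipy-BFGS wrapper into two registered optimizers: `BFGS` (a single scipy BFGS run, `random_restart=False`) and `RBFGS` (BFGS restarted from random points, `random_restart=True`), switches benchmarks, chained optimizers, and recorded recommendations to the appropriate variant, and comments out the gomea-based optimizers. As an illustration of the restart idea only — not nevergrad's actual recast wrapper — a minimal sketch on top of `scipy.optimize.minimize` could look like this (the helper name and the per-restart `maxiter` heuristic are assumptions):

```python
import numpy as np
from scipy import optimize


def bfgs_with_random_restarts(fun, dim, budget, rng=None):
    """Illustrative sketch: rerun scipy's BFGS from random points until the
    evaluation budget is spent, keeping the best point seen."""
    rng = np.random.default_rng() if rng is None else rng
    evals = 0
    best_x, best_f = None, float("inf")

    def counted(x):  # count objective evaluations across restarts
        nonlocal evals
        evals += 1
        return fun(x)

    while evals < budget:
        x0 = rng.normal(size=dim)                        # fresh random start
        maxiter = max(1, (budget - evals) // (2 * dim))  # crude budget split per restart
        res = optimize.minimize(counted, x0, method="BFGS", options={"maxiter": maxiter})
        if res.fun < best_f:
            best_x, best_f = res.x, res.fun
    return best_x, best_f


x_best, f_best = bfgs_with_random_restarts(lambda x: float(np.sum(x ** 2)), dim=3, budget=300)
```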
2 changes: 1 addition & 1 deletion .circleci/config.yml
@@ -13,7 +13,7 @@ orbs:
executors:
ubuntu-python38:
docker:
- - image: cimg/python:3.8
+ - image: cimg/python:3.9
working_directory: ~/repo


2 changes: 1 addition & 1 deletion nevergrad/__init__.py
@@ -15,4 +15,4 @@
__all__ = ["optimizers", "families", "callbacks", "p", "typing", "errors", "ops"]


__version__ = "1.0.0"
__version__ = "1.0.1"
4 changes: 2 additions & 2 deletions nevergrad/benchmark/__main__.py
@@ -29,8 +29,8 @@ def launch(
df = core.compute(experiment, cap_index=cap_index, seed=seed)
else:
with futures.ProcessPoolExecutor(max_workers=num_workers) as executor:
- df = core.compute(
- experiment, seed=seed, cap_index=cap_index, executor=executor, num_workers=num_workers
+ df = core.compute( # type: ignore
+ experiment, seed=seed, cap_index=cap_index, executor=executor, num_workers=num_workers # type: ignore
)
# save data to csv
try:
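The only edit above adds `# type: ignore` to the executor-based call. For context, a rough sketch of that parallel path reusing the keyword arguments shown in the hunk; the experiment name `"deceptive"`, the `cap_index` value, and the worker count are placeholders, and treating the first positional argument of `core.compute` as the experiment name is an inference from `launch()`:

```python
from concurrent import futures

from nevergrad.benchmark import core

if __name__ == "__main__":
    with futures.ProcessPoolExecutor(max_workers=4) as executor:
        # Mirrors the call above: one experiment plan, computed across 4 workers.
        df = core.compute("deceptive", seed=12, cap_index=8, executor=executor, num_workers=4)
    print(df)
```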
26 changes: 13 additions & 13 deletions nevergrad/benchmark/experiments.py
@@ -275,7 +275,7 @@ def refactor_optims(x: tp.List[tp.Any]) -> tp.List[tp.Any]: # type: ignore
"RFMetaModel",
]
algos["yanoisybbob"] = [
"BFGS",
"RBFGS",
"MicroCMA",
"NoisyDiscreteOnePlusOne",
"RandomSearch",
@@ -1065,7 +1065,7 @@ def deceptive(seed: tp.Optional[int] = None) -> tp.Iterator[Experiment]:
optims = get_optimizers("basics", seed=next(seedg))
optims = ["CMA", "DE", "TwoPointsDE", "PSO", "OnePlusOne", "RandomSearch", "NGOptRW"]
optims = [
"BFGS",
"RBFGS",
"LBFGSB",
"DE",
"TwoPointsDE",
@@ -1281,7 +1281,7 @@ def multimodal(seed: tp.Optional[int] = None, para: bool = False) -> tp.Iterator
if not para:
optims += get_optimizers("scipy", seed=next(seedg))
optims = [
"BFGS",
"RBFGS",
"LBFGSB",
"DE",
"TwoPointsDE",
@@ -1324,7 +1324,7 @@ def hdmultimodal(seed: tp.Optional[int] = None) -> tp.Iterator[Experiment]:

optims = get_optimizers("basics", "multimodal", seed=next(seedg))
optims = [
"BFGS",
"RBFGS",
"LBFGSB",
"DE",
"TwoPointsDE",
@@ -1509,7 +1509,7 @@ def yabbob(
"GeneticDE",
]
optims = ["LargeCMA", "TinyCMA", "OldCMA", "MicroCMA"]
optims = ["BFGS", "LBFGSB"]
optims = ["RBFGS", "LBFGSB"]
optims = get_optimizers("oneshot", seed=next(seedg)) # type: ignore
optims = [
"MetaTuneRecentering",
@@ -1519,7 +1519,7 @@
"LHSCauchySearch",
]
optims = [
"BFGS",
"RBFGS",
"LBFGSB",
"MicroCMA",
"RandomSearch",
@@ -2041,7 +2041,7 @@ def pbbob(seed: tp.Optional[int] = None) -> tp.Iterator[Experiment]:
]
optims = ["ChainMetaModelSQP", "MetaModelOnePlusOne", "MetaModelDE"]
optims = ["LargeCMA", "TinyCMA", "OldCMA", "MicroCMA"]
optims = ["BFGS", "LBFGSB", "MemeticDE"]
optims = ["RBFGS", "LBFGSB", "MemeticDE"]
optims = ["QrDE", "QODE", "LhsDE", "NGOpt", "NGOptRW"]
optims = ["TinyCMA", "QODE", "MetaModelOnePlusOne", "LhsDE", "TinyLhsDE", "TinyQODE"]
optims = ["QOPSO", "QORealSpacePSO"]
@@ -2096,7 +2096,7 @@ def zp_pbbob(seed: tp.Optional[int] = None) -> tp.Iterator[Experiment]:
]
optims = ["ChainMetaModelSQP", "MetaModelOnePlusOne", "MetaModelDE"]
optims = ["LargeCMA", "TinyCMA", "OldCMA", "MicroCMA"]
optims = ["BFGS", "LBFGSB", "MemeticDE"]
optims = ["RBFGS", "LBFGSB", "MemeticDE"]
optims = ["QrDE", "QODE", "LhsDE", "NGOpt", "NGOptRW"]
optims = ["TinyCMA", "QODE", "MetaModelOnePlusOne", "LhsDE", "TinyLhsDE", "TinyQODE"]
optims = ["QOPSO", "QORealSpacePSO"]
@@ -2462,7 +2462,7 @@ def aquacrop_fao(seed: tp.Optional[int] = None) -> tp.Iterator[Experiment]:
funcs = [NgAquacrop(i, 300.0 + 150.0 * np.cos(i)) for i in range(3, 7)]
seedg = create_seed_generator(seed)
optims = get_optimizers("basics", seed=next(seedg))
optims = ["BFGS", "LBFGSB", "MemeticDE"]
optims = ["RBFGS", "LBFGSB", "MemeticDE"]
optims = ["PCABO"]
optims = ["PCABO", "NGOpt", "QODE"]
optims = ["QOPSO"] # , "QORealSpacePSO", "RealSpacePSO"]
@@ -2509,10 +2509,10 @@ def rocket(seed: tp.Optional[int] = None, seq: bool = False) -> tp.Iterator[Expe
seedg = create_seed_generator(seed)
optims = get_optimizers("basics", seed=next(seedg))
optims += ["NGOpt", "NGOptRW", "ChainMetaModelSQP"]
optims = ["BFGS", "LBFGSB", "MemeticDE"]
optims = ["RBFGS", "LBFGSB", "MemeticDE"]
optims = ["CMA", "PSO", "QODE", "QRDE", "MetaModelPSO"]
if seq:
optims += ["BFGS", "LBFGSB", "MemeticDE"]
optims += ["RBFGS", "LBFGSB", "MemeticDE"]
optims = ["NGOpt"]
optims = ["PCABO"]
optims = ["PCABO", "NGOpt", "QODE"]
@@ -3287,7 +3287,7 @@ def photonics(
"MetaModelDE",
"SVMMetaModelDE",
"RFMetaModelDE",
"BFGS",
"RBFGS",
"LBFGSB",
]
optims = ["QrDE", "QODE", "RFMetaModelDE"]
@@ -3502,7 +3502,7 @@ def lsgo() -> tp.Iterator[Experiment]:
"CMA",
"PSO",
"OnePlusOne",
"BFGS",
"RBFGS",
]
optims = ["PSO", "RealPSO"]
optims = ["CMA", "PSO", "SQOPSO", "TinyCMA", "Cobyla"]
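Across the `experiments.py` hunks the only substantive edit is swapping `BFGS` for `RBFGS` in the optimizer lists these benchmark generators iterate over. A quick way to inspect what one generator produces (`deceptive` chosen arbitrarily; printing relies on the objects' default repr):

```python
from nevergrad.benchmark import experiments

# Each generator yields Experiment objects pairing a test function with one
# optimizer name and a budget, per the signatures shown above.
for _, xp in zip(range(3), experiments.deceptive(seed=12)):
    print(xp)
```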
6 changes: 3 additions & 3 deletions nevergrad/optimization/optimizerlib.py
@@ -2031,7 +2031,7 @@ def __init__(
) -> None:
super().__init__(parametrization, budget=budget, num_workers=num_workers)
optims: tp.List[base.Optimizer] = []
- optims += [BFGS(self.parametrization, num_workers=1) for _ in range(num_workers)]
+ optims += [RBFGS(self.parametrization, num_workers=1) for _ in range(num_workers)]
for opt in optims[2:]: # make sure initializations differ
opt.initial_guess = self._rng.normal(0, 1, self.dimension) # type: ignore
self.optims.clear()
@@ -2684,8 +2684,8 @@ def __init__(
MemeticDE = Chaining([RotatedTwoPointsDE, TwoPointsDE, DE, SQP], ["fourth", "fourth", "fourth"]).set_name(
"MemeticDE", register=True
)
- QNDE = Chaining([QODE, BFGS], ["half"]).set_name("QNDE", register=True)
- ChainDE = Chaining([DE, BFGS], ["half"]).set_name("ChainDE", register=True)
+ QNDE = Chaining([QODE, RBFGS], ["half"]).set_name("QNDE", register=True)
+ ChainDE = Chaining([DE, RBFGS], ["half"]).set_name("ChainDE", register=True)
OpoDE = Chaining([OnePlusOne, QODE], ["half"]).set_name("OpoDE", register=True)
OpoTinyDE = Chaining([OnePlusOne, TinyQODE], ["half"]).set_name("OpoTinyDE", register=True)
QNDE.no_parallelization = True
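`QNDE` and `ChainDE` now hand the later part of the budget to `RBFGS` instead of the old restarting `BFGS`; with `["half"]`, the first optimizer (QODE or DE) spends roughly half the budget before the quasi-Newton stage takes over. A minimal usage sketch through the public registry (objective and budget are arbitrary):

```python
import nevergrad as ng


def sphere(x):
    return float((x ** 2).sum())


# QNDE: QODE explores for the first half of the budget, then RBFGS refines.
opt = ng.optimizers.registry["QNDE"](parametrization=ng.p.Array(shape=(5,)), budget=200)
recommendation = opt.minimize(sphere)
print(recommendation.value)
```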
58 changes: 30 additions & 28 deletions nevergrad/optimization/recastlib.py
@@ -47,6 +47,7 @@ def __init__(
"gomeatree",
"SMAC3",
"BFGS",
"RBFGS",
"LBFGSB",
"L-BFGS-B",
"SMAC",
@@ -366,30 +367,30 @@ def smac2_obj(p, seed: int = 0):
# best_x = weakself._normalizer.backward(np.asarray(x, dtype=float))
#

elif "gomea" in weakself.method:
import gomea

class gomea_function(gomea.fitness.BBOFitnessFunctionRealValued):
def objective_function(self, objective_index, data): # type: ignore
if weakself._normalizer is not None:
data = weakself._normalizer.backward(np.asarray(data, dtype=np.float32))
return objective_function(data)

gomea_f = gomea_function(weakself.dimension)
lm = {
"gomea": gomea.linkage.Univariate(),
"gomeablock": gomea.linkage.BlockMarginalProduct(2),
"gomeatree": gomea.linkage.LinkageTree("NMI".encode(), True, 0),
}[weakself.method]
rvgom = gomea.RealValuedGOMEA(
fitness=gomea_f,
linkage_model=lm,
lower_init_range=0.0,
upper_init_range=1.0,
max_number_of_evaluations=budget,
)
rvgom.run()
best_x = gomea_f.best_x
# elif "gomea" in weakself.method:
# import gomea
#
# class gomea_function(gomea.fitness.BBOFitnessFunctionRealValued):
# def objective_function(self, objective_index, data): # type: ignore
# if weakself._normalizer is not None:
# data = weakself._normalizer.backward(np.asarray(data, dtype=np.float32))
# return objective_function(data)
#
# gomea_f = gomea_function(weakself.dimension)
# lm = {
# "gomea": gomea.linkage.Univariate(),
# "gomeablock": gomea.linkage.BlockMarginalProduct(2),
# "gomeatree": gomea.linkage.LinkageTree("NMI".encode(), True, 0),
# }[weakself.method]
# rvgom = gomea.RealValuedGOMEA(
# fitness=gomea_f,
# linkage_model=lm,
# lower_init_range=0.0,
# upper_init_range=1.0,
# max_number_of_evaluations=budget,
# )
# rvgom.run()
# best_x = gomea_f.best_x

elif weakself.method == "CmaFmin2":
import cma # type: ignore
@@ -498,13 +499,14 @@ def __init__(self, *, method: str = "Nelder-Mead", random_restart: bool = False)
BOBYQA = NonObjectOptimizer(method="BOBYQA").set_name("BOBYQA", register=True)
NelderMead = NonObjectOptimizer(method="Nelder-Mead").set_name("NelderMead", register=True)
CmaFmin2 = NonObjectOptimizer(method="CmaFmin2").set_name("CmaFmin2", register=True)
- GOMEA = NonObjectOptimizer(method="gomea").set_name("GOMEA", register=True)
- GOMEABlock = NonObjectOptimizer(method="gomeablock").set_name("GOMEABlock", register=True)
- GOMEATree = NonObjectOptimizer(method="gomeatree").set_name("GOMEATree", register=True)
+ # GOMEA = NonObjectOptimizer(method="gomea").set_name("GOMEA", register=True)
+ # GOMEABlock = NonObjectOptimizer(method="gomeablock").set_name("GOMEABlock", register=True)
+ # GOMEATree = NonObjectOptimizer(method="gomeatree").set_name("GOMEATree", register=True)
# NLOPT = NonObjectOptimizer(method="NLOPT").set_name("NLOPT", register=True)
Powell = NonObjectOptimizer(method="Powell").set_name("Powell", register=True)
RPowell = NonObjectOptimizer(method="Powell", random_restart=True).set_name("RPowell", register=True)
BFGS = NonObjectOptimizer(method="BFGS", random_restart=True).set_name("BFGS", register=True)
BFGS = NonObjectOptimizer(method="BFGS", random_restart=False).set_name("BFGS", register=True)
RBFGS = NonObjectOptimizer(method="BFGS", random_restart=True).set_name("RBFGS", register=True)
LBFGSB = NonObjectOptimizer(method="L-BFGS-B", random_restart=True).set_name("LBFGSB", register=True)
Cobyla = NonObjectOptimizer(method="COBYLA").set_name("Cobyla", register=True)
RCobyla = NonObjectOptimizer(method="COBYLA", random_restart=True).set_name("RCobyla", register=True)
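This hunk is the heart of the commit: `BFGS` is now a plain scipy BFGS run (`random_restart=False`), while the restarting behaviour moves to the new `RBFGS` registration. A hedged comparison sketch through the registry (test function and budget chosen arbitrarily):

```python
import numpy as np

import nevergrad as ng


def rastrigin(x):
    return 10 * len(x) + float(np.sum(x ** 2 - 10 * np.cos(2 * np.pi * x)))


for name in ("BFGS", "RBFGS"):  # single run vs. random restarts
    opt = ng.optimizers.registry[name](parametrization=ng.p.Array(shape=(3,)), budget=300)
    reco = opt.minimize(rastrigin)
    print(name, rastrigin(reco.value))
```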
4 changes: 3 additions & 1 deletion nevergrad/optimization/recorded_recommendations.csv
@@ -5,7 +5,9 @@ BAR,0.3611595933,-0.6457227883,-0.4265588512,0.1230634853,,,,,,,,,,,,
BAR2,0.8431454703,0.2332615776,1.0764786704,0.8859188774,,,,,,,,,,,,
BAR3,-1.8339146358,0.0,-0.4307272993,0.8416212336,,,,,,,,,,,,
BAR4,-1.8339146358,0.0,-0.4307272993,0.2533471031,,,,,,,,,,,,
- BFGS,0.4777665424,-0.702409937,0.199534994,-0.691876864,,,,,,,,,,,,
+ BFGS,0.12288,-0.19661,0.,0.983033,,,,,,,,,,,,
+ #BFGS,0.4777665424,-0.702409937,0.199534994,-0.691876864,,,,,,,,,,,,
+ RBFGS,0.4777665424,-0.702409937,0.199534994,-0.691876864,,,,,,,,,,,,
CM,1.0082049151,-0.9099785499,-1.025147209,1.2046460074,,,,,,,,,,,,
CMA,0.4907662517,-0.4429579123,-0.499025767,0.5864103291,,,,,,,,,,,,
CMApara,0.9016450322,-0.8138004751,-0.9167966497,1.0773237384,,,,,,,,,,,,
4 changes: 2 additions & 2 deletions nevergrad/optimization/test_base.py
@@ -142,9 +142,9 @@ def test_optimize_and_dump(tmp_path: Path) -> None:


def test_compare() -> None:
- optimizer = optimizerlib.OnePlusOne(parametrization=2, budget=600, num_workers=6)
+ optimizer = optimizerlib.OnePlusOne(parametrization=2, budget=1200, num_workers=6)
optimizerlib.addCompare(optimizer)
- for _ in range(100): # TODO make faster test
+ for _ in range(200): # TODO make faster test
x: tp.List[tp.Any] = []
for _ in range(6):
x += [optimizer.ask()]
1 change: 1 addition & 0 deletions nevergrad/optimization/test_optimizerlib.py
@@ -285,6 +285,7 @@ def test_optimizers_minimal(name: str) -> None:
if optimizer_cls.one_shot or name in [
"CM",
"NLOPT_LN_PRAXIS",
"NLOPT_GN_CRS2_LM",
"ES",
"RecMixES",
"RecMutDE",
Expand Down
2 changes: 1 addition & 1 deletion requirements/bench.txt
@@ -56,4 +56,4 @@ pytest-circleci-parallelized
Py-BOBYQA>=1.2
ax-platform
loguru # for fcmaes
- gomea
+ #gomea
