
Commit f5e15f3

LGro and aeivazi authored
update reference tests to use two objectives in mo case by default (#89)
* update reference tests to use two objectives in mo case by default

---------

Signed-off-by: Grossberger Lukas (CR/AIR2.2) <Lukas.Grossberger@de.bosch.com>
Signed-off-by: anna.eivazi <anna.eivazi@de.bosch.com>
Co-authored-by: anna.eivazi <anna.eivazi@de.bosch.com>
1 parent 6e9c790 commit f5e15f3
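For context, here is a minimal sketch (not part of this commit) of how a reference test now initializes an optimizer and reports the two default objectives, "loss" (minimized) and "score" (maximized), in the multi-objective case. `MyOptimizer` is a hypothetical optimizer class under test, and the `blackboxopt.base` import path for `MultiObjectiveOptimizer` is an assumption rather than something shown in this diff.

# Sketch only: MyOptimizer stands for any concrete optimizer class under test; the
# import path of MultiObjectiveOptimizer is assumed. The calls mirror the updated tests.
from blackboxopt.base import MultiObjectiveOptimizer
from blackboxopt.optimizers.testing import _initialize_optimizer

# Objectives no longer need to be passed explicitly; the helper now falls back to
# [Objective("loss", False), Objective("score", True)] for multi-objective optimizers
# and to Objective("loss", False) for single-objective ones.
opt = _initialize_optimizer(MyOptimizer, optimizer_kwargs={}, seed=42)

es = opt.generate_evaluation_specification()
objectives = {"loss": 0.1}
if isinstance(opt, MultiObjectiveOptimizer):
    # Multi-objective optimizers are now also reported a value for the second
    # default objective.
    objectives["score"] = -1.0 * objectives["loss"] ** 2
opt.report(es.create_evaluation(objectives=objectives, constraints={"constraint": 10.0}))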

3 files changed: +59 −58 lines changed

blackboxopt/__init__.py

+1 −1

@@ -1,4 +1,4 @@
-__version__ = "4.11.0"
+__version__ = "4.12.0"
 
 from parameterspace import ParameterSpace
 

blackboxopt/optimizers/testing.py

+57 −56

@@ -23,8 +23,8 @@
 def _initialize_optimizer(
     optimizer_class,
     optimizer_kwargs: dict,
-    objective: Objective,
-    objectives: List[Objective],
+    objective: Optional[Objective] = None,
+    objectives: Optional[List[Objective]] = None,
     space: Optional[ps.ParameterSpace] = None,
     seed: Optional[int] = None,
 ) -> Optimizer:
@@ -38,9 +38,13 @@ def _initialize_optimizer(
     space.seed(seed)
 
     if issubclass(optimizer_class, MultiObjectiveOptimizer):
+        if objectives is None:
+            objectives = [Objective("loss", False), Objective("score", True)]
         return optimizer_class(space, objectives, seed=seed, **optimizer_kwargs)
 
     if issubclass(optimizer_class, SingleObjectiveOptimizer):
+        if objective is None:
+            objective = Objective("loss", False)
         return optimizer_class(space, objective, seed=seed, **optimizer_kwargs)
 
     return optimizer_class(space, seed=seed, **optimizer_kwargs)
@@ -74,13 +78,7 @@ def quadratic_function(p1):
             "blackboxopt.base.Optimizer"
         )
 
-    optimizer = _initialize_optimizer(
-        optimizer_class,
-        optimizer_kwargs,
-        objective=Objective("loss", False),
-        objectives=[Objective("loss", False), Objective("score", True)],
-        seed=seed,
-    )
+    optimizer = _initialize_optimizer(optimizer_class, optimizer_kwargs, seed=seed)
 
     eval_spec = optimizer.generate_evaluation_specification()
 
@@ -139,26 +137,28 @@ def is_deterministic_with_fixed_seed_and_larger_space(
     Returns:
         `True` if the test is passed.
     """
+    if seed is None:
+        seed = 42
+
     n_evaluations = 5
     losses = [0.1, 0.2, 0.3, 0.4, 0.5]
 
     run_0_configs: List[Evaluation] = []
     run_1_configs: List[Evaluation] = []
 
     for run_configs in [run_0_configs, run_1_configs]:
-        opt = _initialize_optimizer(
-            optimizer_class,
-            optimizer_kwargs,
-            objective=Objective("loss", False),
-            objectives=[Objective("loss", False)],
-            seed=seed or 42,
-        )
+        opt = _initialize_optimizer(optimizer_class, optimizer_kwargs, seed=seed)
 
         for i in range(n_evaluations):
             es = opt.generate_evaluation_specification()
+
+            objectives = {"loss": losses[i]}
+            if isinstance(opt, MultiObjectiveOptimizer):
+                objectives["score"] = -1.0 * losses[i] ** 2
             evaluation = es.create_evaluation(
-                objectives={"loss": losses[i]}, constraints={"constraint": 10.0}
+                objectives=objectives, constraints={"constraint": 10.0}
             )
+
             opt.report(evaluation)
 
             run_configs.append(evaluation.configuration)
@@ -193,6 +193,8 @@ def is_deterministic_when_reporting_shuffled_evaluations(
     Returns:
         `True` if the test is passed.
     """
+    if seed is None:
+        seed = 0
 
     space = ps.ParameterSpace()
     space.add(ps.ContinuousParameter("p1", (0, 1)))
@@ -209,12 +211,7 @@ def _run_experiment_1d(es):
     for run_idx, run in runs.items():
         run["evaluations"] = []
         opt = _initialize_optimizer(
-            optimizer_class,
-            optimizer_kwargs,
-            objective=Objective("loss", False),
-            objectives=[Objective("loss", False)],
-            space=space,
-            seed=seed or 0,
+            optimizer_class, optimizer_kwargs, space=space, seed=seed
         )
 
         # Report initial data in different order
@@ -226,17 +223,25 @@ def _run_experiment_1d(es):
             )
             for es in eval_specs
         ]
+        if isinstance(opt, MultiObjectiveOptimizer):
+            for e in run["initial_evaluations"]:
+                e.objectives["score"] = -1.0 * e.objectives["loss"] ** 2
+
         shuffle_rng = random.Random(run_idx)
         shuffle_rng.shuffle(run["initial_evaluations"])
         opt.report(run["initial_evaluations"])
 
         # Start optimizing
         for _ in range(5):
             es = opt.generate_evaluation_specification()
+
+            objectives = {"loss": _run_experiment_1d(es)}
+            if isinstance(opt, MultiObjectiveOptimizer):
+                objectives["score"] = -1.0 * objectives["loss"] ** 2
             evaluation = es.create_evaluation(
-                objectives={"loss": _run_experiment_1d(es)},
-                constraints={"constraint": 10.0},
+                objectives=objectives, constraints={"constraint": 10.0}
             )
+
             opt.report(evaluation)
             run["evaluations"].append(evaluation)
 
@@ -276,18 +281,17 @@ def handles_reporting_evaluations_list(
     Returns:
         `True` if the test is passed.
     """
-    opt = _initialize_optimizer(
-        optimizer_class,
-        optimizer_kwargs,
-        objective=Objective("loss", False),
-        objectives=[Objective("loss", False)],
-        seed=seed,
-    )
+    opt = _initialize_optimizer(optimizer_class, optimizer_kwargs, seed=seed)
     evaluations = []
     for i in range(3):
         es = opt.generate_evaluation_specification()
+
+        objectives = {"loss": 0.42 * i}
+        if isinstance(opt, MultiObjectiveOptimizer):
+            objectives["score"] = float(i)
+
         evaluation = es.create_evaluation(
-            objectives={"loss": 0.42 * i}, constraints={"constraint": 10.0 * i}
+            objectives=objectives, constraints={"constraint": 10.0 * i}
         )
         evaluations.append(evaluation)
 
@@ -316,13 +320,7 @@ def raises_evaluation_error_when_reporting_unknown_objective(
     Returns:
         `True` if the test is passed.
    """
-    opt = _initialize_optimizer(
-        optimizer_class,
-        optimizer_kwargs,
-        objective=Objective("loss", False),
-        objectives=[Objective("loss", False)],
-        seed=seed,
-    )
+    opt = _initialize_optimizer(optimizer_class, optimizer_kwargs, seed=seed)
     es_1 = opt.generate_evaluation_specification()
     es_2 = opt.generate_evaluation_specification()
     es_3 = opt.generate_evaluation_specification()
@@ -339,7 +337,11 @@ def raises_evaluation_error_when_reporting_unknown_objective(
         evaluation_3 = es_3.create_evaluation(
             objectives={"loss": 4}, constraints={"constraint": 10.0}
         )
-        opt.report([evaluation_1, evaluation_2, evaluation_3])
+        evaluations = [evaluation_1, evaluation_2, evaluation_3]
+        if isinstance(opt, MultiObjectiveOptimizer):
+            for e in evaluations:
+                e.objectives["score"] = 0.0
+        opt.report(evaluations)
 
         raise AssertionError(
             f"Optimizer {optimizer_class} did not raise an ObjectivesError when a "
@@ -380,19 +382,19 @@ def respects_fixed_parameter(
     fixed_value = 1.0
     space.fix(my_fixed_param=fixed_value)
     opt = _initialize_optimizer(
-        optimizer_class,
-        optimizer_kwargs,
-        objective=Objective("loss", False),
-        objectives=[Objective("loss", False)],
-        space=space,
-        seed=seed,
+        optimizer_class, optimizer_kwargs, space=space, seed=seed
     )
     for _ in range(5):
         es = opt.generate_evaluation_specification()
         assert es.configuration["my_fixed_param"] == fixed_value
+
+        objectives = {"loss": es.configuration["x"] ** 2}
+        if isinstance(opt, MultiObjectiveOptimizer):
+            objectives["score"] = -objectives["loss"]
+
         opt.report(
             es.create_evaluation(
-                objectives={"loss": es.configuration["x"] ** 2},
+                objectives=objectives,
                 constraints={"constraint": 10.0},
             )
         )
@@ -427,20 +429,19 @@ def handles_conditional_space(
     space.seed(seed)
 
     opt = _initialize_optimizer(
-        optimizer_class,
-        optimizer_kwargs,
-        objective=Objective("loss", False),
-        objectives=[Objective("loss", False)],
-        space=space,
-        seed=seed,
+        optimizer_class, optimizer_kwargs, space=space, seed=seed
     )
 
     for _ in range(10):
         es = opt.generate_evaluation_specification()
-        dummy_loss = es.configuration.get("momentum", 1.0) * es.configuration["lr"] ** 2
+        objectives = {
+            "loss": es.configuration.get("momentum", 1.0) * es.configuration["lr"] ** 2
+        }
+        if isinstance(opt, MultiObjectiveOptimizer):
+            objectives["score"] = -1.0 * es.configuration["lr"] ** 2
         opt.report(
             es.create_evaluation(
-                objectives={"loss": dummy_loss}, constraints={"constraint": 10.0}
+                objectives=objectives, constraints={"constraint": 10.0}
             )
         )
 
pyproject.toml

+1 −1

@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "blackboxopt"
-version = "4.11.0"
+version = "4.12.0"
 description = "A common interface for blackbox optimization algorithms along with useful helpers like parallel optimization loops, analysis and visualization scripts."
 readme = "README.md"
 repository = "https://github.com/boschresearch/blackboxopt"
