Skip to content

Commit 174c1f4

Browse files
authored
Remove global seeding and add test fixture for seed (#78)
* Remove global seeding and add test fixture for seed Signed-off-by: Reeb Lucia <Lucia.Reeb@de.bosch.com> * Add ADR Signed-off-by: Reeb Lucia <Lucia.Reeb@de.bosch.com> * Bump version Signed-off-by: Reeb Lucia <Lucia.Reeb@de.bosch.com> Signed-off-by: Reeb Lucia <Lucia.Reeb@de.bosch.com>
1 parent 36e1ad6 commit 174c1f4

12 files changed

+170
-110
lines changed

.gitignore

+1
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,6 @@
11
# Custom
22
.vscode/
3+
.idea/
34
poetry.toml
45
dask-worker-space/
56

Original file line numberDiff line numberDiff line change
@@ -0,0 +1,17 @@
1+
### Context
2+
While our suite of unit tests should run in a stable and deterministic manner,
3+
running the tests under stochastic conditions allows us to detect instabilities
4+
that would otherwise remain undetected. Especially for the set of reference tests,
5+
we would like to ascertain that they run stably even with random seeds.
6+
In addition, we would like to mitigate the issues that arise
7+
from some of the optimizers setting a global (torch) seed, which affects
8+
subsequent tests.
9+
10+
### Decision
11+
Instead of using a constant (fixed) seed, we will seed all reference tests with a
12+
random seed from a test fixture. That ensures that all tests will be fully reproducible,
13+
since the seed will be displayed in the test output.
14+
15+
### Consequences
16+
We accept that our test suite will no longer be fully deterministic, and that it
17+
is possible that instabilities will surface in the existing tests.

blackboxopt/__init__.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
__version__ = "4.8.3"
1+
__version__ = "4.8.4"
22

33
from parameterspace import ParameterSpace
44

blackboxopt/optimizers/testing.py

+28-6
Original file line numberDiff line numberDiff line change
@@ -26,7 +26,7 @@ def _initialize_optimizer(
2626
objective: Objective,
2727
objectives: List[Objective],
2828
space: Optional[ps.ParameterSpace] = None,
29-
seed=42,
29+
seed: Optional[int] = None,
3030
) -> Optimizer:
3131
if space is None:
3232
space = ps.ParameterSpace()
@@ -35,6 +35,7 @@ def _initialize_optimizer(
3535
space.add(ps.ContinuousParameter("p3", (0, 1)))
3636
space.add(ps.CategoricalParameter("p4", [True, False]))
3737
space.add(ps.OrdinalParameter("p5", ("small", "medium", "large")))
38+
space.seed(seed)
3839

3940
if issubclass(optimizer_class, MultiObjectiveOptimizer):
4041
return optimizer_class(space, objectives, seed=seed, **optimizer_kwargs)
@@ -50,6 +51,7 @@ def optimize_single_parameter_sequentially_for_n_max_evaluations(
5051
Type[SingleObjectiveOptimizer], Type[MultiObjectiveOptimizer]
5152
],
5253
optimizer_kwargs: dict,
54+
seed: Optional[int] = None,
5355
n_max_evaluations: int = 20,
5456
):
5557
"""[summary]
@@ -58,6 +60,7 @@ def optimize_single_parameter_sequentially_for_n_max_evaluations(
5860
optimizer_class: [description]
5961
optimizer_kwargs: [description]
6062
n_max_evaluations: [description]
63+
seed: (optional) custom seed
6164
6265
Returns:
6366
[description]
@@ -76,6 +79,7 @@ def quadratic_function(p1):
7679
optimizer_kwargs,
7780
objective=Objective("loss", False),
7881
objectives=[Objective("loss", False), Objective("score", True)],
82+
seed=seed,
7983
)
8084

8185
eval_spec = optimizer.generate_evaluation_specification()
@@ -114,6 +118,7 @@ def is_deterministic_with_fixed_seed_and_larger_space(
114118
Type[SingleObjectiveOptimizer], Type[MultiObjectiveOptimizer]
115119
],
116120
optimizer_kwargs: dict,
121+
seed: Optional[int] = None,
117122
):
118123
"""Check if optimizer is deterministic.
119124
@@ -129,6 +134,7 @@ def is_deterministic_with_fixed_seed_and_larger_space(
129134
optimizer_kwargs: Expected to contain additional arguments for initializing
130135
the optimizer. (`search_space` and `objective(s)` are set automatically
131136
by the test.)
137+
seed: (optional) custom seed
132138
133139
Returns:
134140
`True` if the test is passed.
@@ -145,7 +151,7 @@ def is_deterministic_with_fixed_seed_and_larger_space(
145151
optimizer_kwargs,
146152
objective=Objective("loss", False),
147153
objectives=[Objective("loss", False)],
148-
seed=42,
154+
seed=seed or 42,
149155
)
150156

151157
for i in range(n_evaluations):
@@ -166,6 +172,7 @@ def is_deterministic_when_reporting_shuffled_evaluations(
166172
Type[SingleObjectiveOptimizer], Type[MultiObjectiveOptimizer]
167173
],
168174
optimizer_kwargs: dict,
175+
seed: Optional[int] = None,
169176
):
170177
"""Check if determinism isn't affected by the order of initially reported data.
171178
@@ -181,13 +188,15 @@ def is_deterministic_when_reporting_shuffled_evaluations(
181188
optimizer_kwargs: Expected to contain additional arguments for initializing
182189
the optimizer. (`search_space` and `objective(s)` are set automatically
183190
by the test.)
191+
seed: (optional) custom seed
184192
185193
Returns:
186194
`True` if the test is passed.
187195
"""
188196

189197
space = ps.ParameterSpace()
190198
space.add(ps.ContinuousParameter("p1", (0, 1)))
199+
space.seed(seed)
191200

192201
def _run_experiment_1d(es):
193202
x = es.configuration["p1"]
@@ -205,7 +214,7 @@ def _run_experiment_1d(es):
205214
objective=Objective("loss", False),
206215
objectives=[Objective("loss", False)],
207216
space=space,
208-
seed=0,
217+
seed=seed or 0,
209218
)
210219

211220
# Report initial data in different order
@@ -217,8 +226,8 @@ def _run_experiment_1d(es):
217226
)
218227
for es in eval_specs
219228
]
220-
random.seed(run_idx)
221-
random.shuffle(run["initial_evaluations"])
229+
shuffle_rng = random.Random(run_idx)
230+
shuffle_rng.shuffle(run["initial_evaluations"])
222231
opt.report(run["initial_evaluations"])
223232

224233
# Start optimizing
@@ -246,6 +255,7 @@ def handles_reporting_evaluations_list(
246255
Type[SingleObjectiveOptimizer], Type[MultiObjectiveOptimizer]
247256
],
248257
optimizer_kwargs: dict,
258+
seed: Optional[int] = None,
249259
):
250260
"""Check if optimizer's report method can process an iterable of evaluations.
251261
@@ -259,6 +269,7 @@ def handles_reporting_evaluations_list(
259269
optimizer_kwargs: Expected to contain additional arguments for initializing
260270
the optimizer. (`search_space` and `objective(s)` are set automatically
261271
by the test.)
272+
seed: (optional) custom seed
262273
263274
Returns:
264275
`True` if the test is passed.
@@ -268,7 +279,7 @@ def handles_reporting_evaluations_list(
268279
optimizer_kwargs,
269280
objective=Objective("loss", False),
270281
objectives=[Objective("loss", False)],
271-
seed=42,
282+
seed=seed,
272283
)
273284
evaluations = []
274285
for i in range(3):
@@ -286,6 +297,7 @@ def raises_evaluation_error_when_reporting_unknown_objective(
286297
Type[SingleObjectiveOptimizer], Type[MultiObjectiveOptimizer]
287298
],
288299
optimizer_kwargs: dict,
300+
seed: Optional[int] = None,
289301
):
290302
"""Check if optimizer's report method raises exception in case objective is unknown.
291303
@@ -297,6 +309,7 @@ def raises_evaluation_error_when_reporting_unknown_objective(
297309
optimizer_kwargs: Expected to contain additional arguments for initializing
298310
the optimizer. (`search_space` and `objective(s)` are set automatically
299311
by the test.)
312+
seed: (optional) custom seed
300313
301314
Returns:
302315
`True` if the test is passed.
@@ -306,6 +319,7 @@ def raises_evaluation_error_when_reporting_unknown_objective(
306319
optimizer_kwargs,
307320
objective=Objective("loss", False),
308321
objectives=[Objective("loss", False)],
322+
seed=seed,
309323
)
310324
es_1 = opt.generate_evaluation_specification()
311325
es_2 = opt.generate_evaluation_specification()
@@ -341,6 +355,7 @@ def respects_fixed_parameter(
341355
Type[SingleObjectiveOptimizer], Type[MultiObjectiveOptimizer]
342356
],
343357
optimizer_kwargs: dict,
358+
seed: Optional[int] = None,
344359
):
345360
"""Check if optimizer's generated evaluation specifications contain the values
346361
a parameter in the search space was fixed to.
@@ -350,13 +365,15 @@ def respects_fixed_parameter(
350365
optimizer_kwargs: Expected to contain additional arguments for initializing
351366
the optimizer. (`search_space` and `objective(s)` are set automatically
352367
by the test.)
368+
seed: (optional) custom seed
353369
354370
Returns:
355371
`True` if the test is passed.
356372
"""
357373
space = ps.ParameterSpace()
358374
space.add(ps.ContinuousParameter("my_fixed_param", (-10.0, 200.0)))
359375
space.add(ps.ContinuousParameter("x", (-2.0, 2.0)))
376+
space.seed(seed)
360377

361378
fixed_value = 1.0
362379
space.fix(my_fixed_param=fixed_value)
@@ -366,6 +383,7 @@ def respects_fixed_parameter(
366383
objective=Objective("loss", False),
367384
objectives=[Objective("loss", False)],
368385
space=space,
386+
seed=seed,
369387
)
370388
for _ in range(5):
371389
es = opt.generate_evaluation_specification()
@@ -383,6 +401,7 @@ def handles_conditional_space(
383401
Type[SingleObjectiveOptimizer], Type[MultiObjectiveOptimizer]
384402
],
385403
optimizer_kwargs: dict,
404+
seed: Optional[int] = None,
386405
):
387406
"""Check if optimizer handles conditional i.e. hierarchical search spaces.
388407
@@ -391,6 +410,7 @@ def handles_conditional_space(
391410
optimizer_kwargs: Expected to contain additional arguments for initializing
392411
the optimizer. (`search_space` and `objective(s)` are set automatically
393412
by the test.)
413+
seed: (optional) custom seed
394414
395415
Returns:
396416
`True` if the test is passed.
@@ -402,13 +422,15 @@ def handles_conditional_space(
402422
ps.ContinuousParameter("momentum", (0.0, 1.0)),
403423
lambda optimizer: optimizer == "sgd",
404424
)
425+
space.seed(seed)
405426

406427
opt = _initialize_optimizer(
407428
optimizer_class,
408429
optimizer_kwargs,
409430
objective=Objective("loss", False),
410431
objectives=[Objective("loss", False)],
411432
space=space,
433+
seed=seed,
412434
)
413435

414436
for _ in range(10):

0 commit comments

Comments
 (0)