Skip to content

Commit 421425d

Browse files
authored
add discrete acquisition function optimization for discrete spaces with gpbo (#79)
* add discrete acquisition function optimization for discrete spaces with gpbo --------- Signed-off-by: Grossberger Lukas (CR/AIR2.2) <Lukas.Grossberger@de.bosch.com>
1 parent a8b42f9 commit 421425d

File tree

4 files changed

+119
-31
lines changed

4 files changed

+119
-31
lines changed

blackboxopt/__init__.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
__version__ = "4.8.5"
1+
__version__ = "4.9.0"
22

33
from parameterspace import ParameterSpace
44

blackboxopt/optimizers/botorch_base.py

+47-29
Original file line numberDiff line numberDiff line change
@@ -32,7 +32,7 @@
3232
from botorch.acquisition import AcquisitionFunction
3333
from botorch.exceptions import BotorchTensorDimensionWarning
3434
from botorch.models.model import Model
35-
from botorch.optim import optimize_acqf
35+
from botorch.optim import optimize_acqf, optimize_acqf_discrete
3636
from botorch.sampling.samplers import IIDNormalSampler
3737
from sklearn.impute import SimpleImputer
3838

@@ -166,27 +166,50 @@ def to_numerical(
166166
return X, Y
167167

168168

169-
def init_af_opt_kwargs(af_opt_kwargs: Optional[dict]) -> dict:
170-
"""Provide default initialization for the acquisition function optimizer
171-
configuration. Ensure that all mandatory fields are set to be used by BoTorch.
169+
def _acquisition_function_optimizer_factory(
170+
search_space: ps.ParameterSpace,
171+
af_opt_kwargs: Optional[dict],
172+
torch_dtype: torch.dtype,
173+
) -> Callable[[AcquisitionFunction], Tuple[torch.Tensor, torch.Tensor]]:
174+
"""Prepare either BoTorch's `optimize_acqf_discrete` or `optimize_acqf` depending
175+
on whether the search space is fully discrete or not and set required defaults if
176+
not overridden by `af_opt_kwargs`.
172177
173178
Args:
174-
af_opt_kwargs: Acquisition function configuration.
179+
search_space: Search space used for optimization.
180+
af_opt_kwargs: Acquisition function optimizer configuration, e.g. containing
181+
values for `n_samples` for discrete optimization, and `num_restarts`,
182+
`raw_samples` for the continuous optimization case.
183+
torch_dtype: Torch tensor type.
175184
176185
Returns:
177-
Acquisition function optimizer configuration, applicable for BoTorch's
178-
optimizer.
186+
Acquisition function optimizer that takes an acquisition function and returns a
187+
candidate with its associated acquisition function value.
179188
"""
189+
kwargs = {} if af_opt_kwargs is None else af_opt_kwargs.copy()
180190

181-
af_opt_config = {} if af_opt_kwargs is None else af_opt_kwargs
182-
183-
# number of initial samples during AF optimization
184-
af_opt_config.setdefault("raw_samples", 1024)
185-
186-
# number of restarts during AF optimization
187-
af_opt_config.setdefault("num_restarts", 4)
188-
189-
return af_opt_config
191+
is_fully_discrete_space = not any(
192+
search_space[n]["parameter"].is_continuous
193+
for n in search_space.get_parameter_names()
194+
)
195+
if is_fully_discrete_space:
196+
choices = torch.Tensor(
197+
[
198+
search_space.to_numerical(search_space.sample())
199+
for _ in range(kwargs.pop("n_samples", 5_000))
200+
]
201+
).to(dtype=torch_dtype)
202+
return functools.partial(optimize_acqf_discrete, q=1, choices=choices, **kwargs)
203+
204+
return functools.partial(
205+
optimize_acqf,
206+
q=1,
207+
# The numerical representation always lives on the unit hypercube
208+
bounds=torch.tensor([[0, 1]] * len(search_space), dtype=torch_dtype).T,
209+
num_restarts=kwargs.pop("num_restarts", 4),
210+
raw_samples=kwargs.pop("raw_samples", 1024),
211+
**kwargs,
212+
)
190213

191214

192215
def filter_y_nans(
@@ -256,7 +279,8 @@ def __init__(
256279
Providing a partially initialized class is possible with, e.g.
257280
`functools.partial(UpperConfidenceBound, beta=6.0, maximize=False)`.
258281
af_optimizer_kwargs: Settings for acquisition function optimizer,
259-
see `botorch.optim.optimize_acqf`.
282+
see `botorch.optim.optimize_acqf` and in case the whole search space
283+
is discrete: `botorch.optim.optimize_acqf_discrete`.
260284
num_initial_random_samples: Size of the initial space-filling design that
261285
is used before starting BO. The points are sampled randomly in the
262286
search space. If no random sampling is required, set it to 0.
@@ -286,7 +310,7 @@ def __init__(
286310

287311
self.model = model
288312
self.acquisition_function_factory = acquisition_function_factory
289-
self.af_opt_kwargs = init_af_opt_kwargs(af_optimizer_kwargs)
313+
self.af_optimizer_kwargs = af_optimizer_kwargs
290314

291315
def _create_fantasy_model(self, model: Model) -> Model:
292316
"""Create model with the pending specifications and model based
@@ -327,7 +351,6 @@ def _generate_evaluation_specification(self):
327351
fantasy_model = self._create_fantasy_model(self.model)
328352
fantasy_model.eval()
329353

330-
# find next configuration by optimizing the acquisition function
331354
af = self.acquisition_function_factory(fantasy_model)
332355
if getattr(af, "maximize", False):
333356
raise ValueError(
@@ -338,17 +361,12 @@ def _generate_evaluation_specification(self):
338361
"acquisition_function_factory init argument."
339362
)
340363

341-
# numerical representation always lives on hypercube
342-
bounds = torch.tensor(
343-
[[0, 1]] * len(self.search_space), dtype=self.torch_dtype
344-
).T
345-
346-
configuration, _ = optimize_acqf(
347-
af,
348-
bounds=bounds,
349-
q=1,
350-
**self.af_opt_kwargs,
364+
acquisition_function_optimizer = _acquisition_function_optimizer_factory(
365+
search_space=self.search_space,
366+
af_opt_kwargs=self.af_optimizer_kwargs,
367+
torch_dtype=self.torch_dtype,
351368
)
369+
configuration, _ = acquisition_function_optimizer(af)
352370

353371
return EvaluationSpecification(
354372
configuration=self.search_space.from_numerical(configuration[0]),

pyproject.toml

+1-1
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
[tool.poetry]
22
name = "blackboxopt"
3-
version = "4.8.5"
3+
version = "4.9.0"
44
description = "A common interface for blackbox optimization algorithms along with useful helpers like parallel optimization loops, analysis and visualization scripts."
55
readme = "README.md"
66
repository = "https://github.com/boschresearch/blackboxopt"

tests/optimizers/botorch_base_test.py

+70
Original file line numberDiff line numberDiff line change
@@ -6,14 +6,17 @@
66
from functools import partial
77

88
import numpy as np
9+
import parameterspace as ps
910
import pytest
1011
import torch
1112
from botorch.acquisition import UpperConfidenceBound
1213
from botorch.models import SingleTaskGP
14+
from botorch.optim import optimize_acqf, optimize_acqf_discrete
1315

1416
from blackboxopt import ConstraintsError, Evaluation, Objective
1517
from blackboxopt.optimizers.botorch_base import (
1618
SingleObjectiveBOTorchOptimizer,
19+
_acquisition_function_optimizer_factory,
1720
filter_y_nans,
1821
impute_nans_with_constant,
1922
to_numerical,
@@ -57,6 +60,73 @@ def test_all_reference_tests(reference_test, seed):
5760
)
5861

5962

63+
def test_acquisition_function_optimizer_factory_with_continuous():
64+
continuous_space = ps.ParameterSpace()
65+
continuous_space.add(ps.ContinuousParameter("conti1", (0.0, 1.0)))
66+
continuous_space.add(ps.ContinuousParameter("conti2", (-1.0, 1.0)))
67+
68+
af_opt = _acquisition_function_optimizer_factory(
69+
continuous_space, af_opt_kwargs={}, torch_dtype=torch.float64
70+
)
71+
72+
assert af_opt.func == optimize_acqf # pylint: disable=no-member
73+
74+
75+
def test_acquisition_function_optimizer_factory_with_discrete_space():
76+
discrete_space = ps.ParameterSpace()
77+
discrete_space.add(ps.IntegerParameter("integ", (-5, 10)))
78+
discrete_space.add(ps.OrdinalParameter("ordin", ("small", "medium", "large")))
79+
discrete_space.add(ps.CategoricalParameter("categ", ("woof", "miaow", "moo")))
80+
81+
af_opt = _acquisition_function_optimizer_factory(
82+
discrete_space, af_opt_kwargs={}, torch_dtype=torch.float64
83+
)
84+
85+
assert af_opt.func == optimize_acqf_discrete # pylint: disable=no-member
86+
87+
88+
def test_acquisition_function_optimizer_factory_with_mixed_space():
89+
mixed_space = ps.ParameterSpace()
90+
mixed_space.add(ps.OrdinalParameter("ordin", ("small", "medium", "large")))
91+
mixed_space.add(ps.ContinuousParameter("conti", (0.0, 1.0)))
92+
93+
af_opt = _acquisition_function_optimizer_factory(
94+
mixed_space, af_opt_kwargs={}, torch_dtype=torch.float64
95+
)
96+
97+
assert af_opt.func == optimize_acqf # pylint: disable=no-member
98+
99+
100+
def test_find_optimum_in_1d_discrete_space(seed):
101+
space = ps.ParameterSpace()
102+
space.add(ps.IntegerParameter("integ", (0, 2)))
103+
batch_shape = torch.Size()
104+
opt = SingleObjectiveBOTorchOptimizer(
105+
search_space=space,
106+
objective=Objective("loss", greater_is_better=False),
107+
model=SingleTaskGP(
108+
torch.empty((*batch_shape, 0, len(space)), dtype=torch.float64),
109+
torch.empty((*batch_shape, 0, 1), dtype=torch.float64),
110+
),
111+
acquisition_function_factory=partial(
112+
UpperConfidenceBound, beta=1.0, maximize=False
113+
),
114+
max_pending_evaluations=5,
115+
seed=seed,
116+
)
117+
118+
losses = []
119+
for _ in range(10):
120+
es = opt.generate_evaluation_specification()
121+
loss = es.configuration["integ"] ** 2
122+
losses.append(loss)
123+
opt.report(es.create_evaluation(objectives={"loss": loss}))
124+
125+
assert (
126+
sum(l == 0 for l in losses) > 5
127+
), "After figuring out the best of the three points, it should only propose that."
128+
129+
60130
def test_impute_nans_with_constant():
61131
x1_no_nans = torch.tensor([[0.1, 0.1], [0.7, 0.2], [1.0, 0.3]])
62132
x1_some_nans = torch.tensor([[0.1, 0.1], [0.7, float("nan")], [1.0, 0.3]])

0 commit comments

Comments
 (0)