 from botorch.acquisition import AcquisitionFunction
 from botorch.exceptions import BotorchTensorDimensionWarning
 from botorch.models.model import Model
-from botorch.optim import optimize_acqf
+from botorch.optim import optimize_acqf, optimize_acqf_discrete
 from botorch.sampling.samplers import IIDNormalSampler
 from sklearn.impute import SimpleImputer
@@ -166,27 +166,50 @@ def to_numerical(
     return X, Y
 
 
-def init_af_opt_kwargs(af_opt_kwargs: Optional[dict]) -> dict:
-    """Provide default initialization for the acquisition function optimizer
-    configuration. Ensure that all mandatory fields are set to be used by BoTorch.
+def _acquisition_function_optimizer_factory(
+    search_space: ps.ParameterSpace,
+    af_opt_kwargs: Optional[dict],
+    torch_dtype: torch.dtype,
+) -> Callable[[AcquisitionFunction], Tuple[torch.Tensor, torch.Tensor]]:
+    """Prepare either BoTorch's `optimize_acqf_discrete` or `optimize_acqf`, depending
+    on whether the search space is fully discrete or not, and set required defaults if
+    not overridden by `af_opt_kwargs`.
 
     Args:
-        af_opt_kwargs: Acquisition function configuration.
+        search_space: Search space used for optimization.
+        af_opt_kwargs: Acquisition function optimizer configuration, e.g. containing
+            values for `n_samples` for discrete optimization, and `num_restarts`,
+            `raw_samples` for the continuous optimization case.
+        torch_dtype: Torch tensor type.
 
     Returns:
-        Acquisition function optimizer configuration, applicable for BoTorch's
-        optimizer.
+        Acquisition function optimizer that takes an acquisition function and returns a
+        candidate with its associated acquisition function value.
     """
+    kwargs = {} if af_opt_kwargs is None else af_opt_kwargs.copy()
 
-    af_opt_config = {} if af_opt_kwargs is None else af_opt_kwargs
-
-    # number of initial samples during AF optimization
-    af_opt_config.setdefault("raw_samples", 1024)
-
-    # number of restarts during AF optimization
-    af_opt_config.setdefault("num_restarts", 4)
-
-    return af_opt_config
+    is_fully_discrete_space = not any(
+        search_space[n]["parameter"].is_continuous
+        for n in search_space.get_parameter_names()
+    )
+    if is_fully_discrete_space:
+        choices = torch.Tensor(
+            [
+                search_space.to_numerical(search_space.sample())
+                for _ in range(kwargs.pop("n_samples", 5_000))
+            ]
+        ).to(dtype=torch_dtype)
+        return functools.partial(optimize_acqf_discrete, q=1, choices=choices, **kwargs)
+
+    return functools.partial(
+        optimize_acqf,
+        q=1,
+        # The numerical representation always lives on the unit hypercube
+        bounds=torch.tensor([[0, 1]] * len(search_space), dtype=torch_dtype).T,
+        num_restarts=kwargs.pop("num_restarts", 4),
+        raw_samples=kwargs.pop("raw_samples", 1024),
+        **kwargs,
+    )
 
 
 def filter_y_nans(
@@ -256,7 +279,8 @@ def __init__(
                 Providing a partially initialized class is possible with, e.g.
                 `functools.partial(UpperConfidenceBound, beta=6.0, maximize=False)`.
             af_optimizer_kwargs: Settings for acquisition function optimizer,
-                see `botorch.optim.optimize_acqf`.
+                see `botorch.optim.optimize_acqf` and, in case the whole search space
+                is discrete, `botorch.optim.optimize_acqf_discrete`.
             num_initial_random_samples: Size of the initial space-filling design that
                 is used before starting BO. The points are sampled randomly in the
                 search space. If no random sampling is required, set it to 0.
@@ -286,7 +310,7 @@ def __init__(
         self.model = model
         self.acquisition_function_factory = acquisition_function_factory
-        self.af_opt_kwargs = init_af_opt_kwargs(af_optimizer_kwargs)
+        self.af_optimizer_kwargs = af_optimizer_kwargs
 
     def _create_fantasy_model(self, model: Model) -> Model:
         """Create model with the pending specifications and model based
@@ -327,7 +351,6 @@ def _generate_evaluation_specification(self):
         fantasy_model = self._create_fantasy_model(self.model)
         fantasy_model.eval()
 
-        # find next configuration by optimizing the acquisition function
         af = self.acquisition_function_factory(fantasy_model)
         if getattr(af, "maximize", False):
             raise ValueError(
@@ -338,17 +361,12 @@ def _generate_evaluation_specification(self):
                 "acquisition_function_factory init argument."
             )
 
-        # numerical representation always lives on hypercube
-        bounds = torch.tensor(
-            [[0, 1]] * len(self.search_space), dtype=self.torch_dtype
-        ).T
-
-        configuration, _ = optimize_acqf(
-            af,
-            bounds=bounds,
-            q=1,
-            **self.af_opt_kwargs,
+        acquisition_function_optimizer = _acquisition_function_optimizer_factory(
+            search_space=self.search_space,
+            af_opt_kwargs=self.af_optimizer_kwargs,
+            torch_dtype=self.torch_dtype,
         )
+        configuration, _ = acquisition_function_optimizer(af)
 
         return EvaluationSpecification(
             configuration=self.search_space.from_numerical(configuration[0]),
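
For reference, a minimal usage sketch of the new factory (not part of the diff). It assumes the factory defined above is in scope, that the `parameterspace` constructor calls (`ParameterSpace`, `ContinuousParameter`) match the installed version, and it builds a toy `SingleTaskGP` purely for illustration. With at least one continuous parameter, the factory returns a thin wrapper around `optimize_acqf` with the defaults shown in the diff; a fully discrete space would instead route to `optimize_acqf_discrete` over up to `n_samples` sampled candidate choices.

    import parameterspace as ps
    import torch
    from botorch.acquisition import UpperConfidenceBound
    from botorch.models import SingleTaskGP

    # Toy one-dimensional search space (constructor signature assumed).
    space = ps.ParameterSpace()
    space.add(ps.ContinuousParameter(name="x", bounds=(0.0, 1.0)))

    # Toy GP on the unit interval; the module above expects minimizing acquisition
    # functions (see the `maximize` check in the diff), hence maximize=False.
    train_x = torch.rand(8, 1, dtype=torch.float64)
    train_y = (train_x - 0.3) ** 2
    model = SingleTaskGP(train_x, train_y)
    af = UpperConfidenceBound(model, beta=2.0, maximize=False)

    optimizer = _acquisition_function_optimizer_factory(
        search_space=space,
        af_opt_kwargs={"num_restarts": 2, "raw_samples": 64},
        torch_dtype=torch.float64,
    )
    candidate, value = optimizer(af)  # candidate has shape (1, len(space))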