Commit 6ba233d

new: first draft of the documentation

1 parent 3c1751e commit 6ba233d

159 files changed: 58,137 additions, 0 deletions


.buildinfo (+4 lines)

@@ -0,0 +1,4 @@
# Sphinx build info version 1
# This file hashes the configuration used when building these files. When it is not found, a full rebuild will be done.
config: 6ab4861fb0ec34198af10ae911faedff
tags: 645f666f9bcd5a90fca523b33c5a78b7

.nojekyll

Whitespace-only changes.
@@ -0,0 +1,37 @@
# PyBADS Example 1: Basic usage
# (code only - see Jupyter notebook for the tutorial)

import numpy as np

from pybads import BADS


def rosenbrocks_fcn(x):
    """Rosenbrock's 'banana' function in any dimension."""
    x_2d = np.atleast_2d(x)
    return np.sum(
        100 * (x_2d[:, 0:-1] ** 2 - x_2d[:, 1:]) ** 2
        + (x_2d[:, 0:-1] - 1) ** 2,
        axis=1,
    )


target = rosenbrocks_fcn

lb = np.array([-20, -20])  # Lower bounds
ub = np.array([20, 20])  # Upper bounds
plb = np.array([-5, -5])  # Plausible lower bounds
pub = np.array([5, 5])  # Plausible upper bounds
x0 = np.array([0, 0])  # Starting point

bads = BADS(target, x0, lb, ub, plb, pub)
optimize_result = bads.optimize()

x_min = optimize_result["x"]
fval = optimize_result["fval"]

print(f"BADS minimum at: x_min = {x_min.flatten()}, fval = {fval:.4g}")
print(
    f"total f-count: {optimize_result['func_count']}, time: {round(optimize_result['total_time'], 2)} s"
)
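A minimal sketch (not part of this commit's files) of reusing the same basic-usage setup when the objective takes extra fixed arguments: functools.partial freezes the extra parameter so BADS still receives a function of x alone. The names shifted_rosenbrock and shift are illustrative; only the BADS(target, x0, lb, ub, plb, pub) call and the variables defined in the example above are assumed.

from functools import partial


def shifted_rosenbrock(x, shift):
    """Illustrative variant of Rosenbrock's function, with its minimum moved to 1 + shift."""
    return rosenbrocks_fcn(np.atleast_2d(x) - shift)


# Freeze the extra keyword argument so BADS sees a function of x alone.
target = partial(shifted_rosenbrock, shift=np.array([0.5, 0.5]))

bads = BADS(target, x0, lb, ub, plb, pub)
optimize_result = bads.optimize()
print(optimize_result["x"].flatten(), optimize_result["fval"])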
@@ -0,0 +1,60 @@
# PyBADS Example 5: Extended usage
# (code only - see Jupyter notebook for a tutorial)

import numpy as np

from pybads import BADS


def camelback6(x):
    """Six-hump camelback function."""
    x_2d = np.atleast_2d(x)
    x1 = x_2d[:, 0]
    x2 = x_2d[:, 1]
    f = (
        (4 - 2.1 * (x1 * x1) + (x1 * x1 * x1 * x1) / 3.0) * (x1 * x1)
        + x1 * x2
        + (-4 + 4 * (x2 * x2)) * (x2 * x2)
    )
    return f


lb = np.array([-3, -2])  # Lower bounds
ub = np.array([3, 2])  # Upper bounds
plb = np.array([-2.9, -1.9])  # Plausible lower bounds
pub = np.array([2.9, 1.9])  # Plausible upper bounds

options = {
    "display": "off",  # We switch off the printing
    "uncertainty_handling": False,  # Good to specify that this is a deterministic function
}

num_opts = 10
optimize_results = []
x_vec = np.zeros((num_opts, lb.shape[0]))
fval_vec = np.zeros(num_opts)

for opt_count in range(num_opts):
    print("Running optimization " + str(opt_count) + "...")
    x0 = np.random.uniform(low=plb, high=pub)
    bads = BADS(camelback6, x0, lb, ub, plb, pub, options=options)
    optimize_results.append(bads.optimize())
    x_vec[opt_count] = optimize_results[opt_count].x
    fval_vec[opt_count] = optimize_results[opt_count].fval

print("Found solutions:")
print(x_vec)

print("Function values at solutions:")
print(fval_vec)

idx_best = np.argmin(fval_vec)
result_best = optimize_results[idx_best]

x_min = result_best["x"]
fval = result_best["fval"]

print(f"BADS minimum at x_min = {x_min.flatten()}")
print(f"Function value at minimum fval = {fval}")

result_best
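A possible tweak, not part of the commit: the same multi-start loop with a seeded NumPy generator, so the random starting points (and hence the restarts) are reproducible from run to run. It reuses camelback6, num_opts, lb, ub, plb, pub, and options from the example above; only plain NumPy is assumed beyond that.

rng = np.random.default_rng(0)  # fixed seed: identical restarts on every run

x_vec = np.zeros((num_opts, lb.shape[0]))
fval_vec = np.zeros(num_opts)

for opt_count in range(num_opts):
    x0 = rng.uniform(low=plb, high=pub)  # draw a start inside the plausible box
    result = BADS(camelback6, x0, lb, ub, plb, pub, options=options).optimize()
    x_vec[opt_count] = result.x
    fval_vec[opt_count] = result.fval

print(x_vec[np.argmin(fval_vec)], fval_vec.min())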
@@ -0,0 +1,55 @@
# PyBADS Example 4: Noisy objective with user-provided noise estimates
# (code only - see Jupyter notebook for a tutorial)

import numpy as np

from pybads import BADS


def noisy_sphere_estimated_noise(x, scale=1.0):
    """Quadratic function with heteroskedastic noise; also return noise estimate."""
    x_2d = np.atleast_2d(x)
    f = np.sum(x_2d**2, axis=1)
    sigma = scale * (1.0 + np.sqrt(f))
    y = f + sigma * np.random.normal(size=x_2d.shape[0])
    return y, sigma


x0 = np.array([-3, -3])  # Starting point
lb = np.array([-5, -5])  # Lower bounds
ub = np.array([5, 5])  # Upper bounds
plb = np.array([-2, -2])  # Plausible lower bounds
pub = np.array([2, 2])  # Plausible upper bounds

options = {
    "uncertainty_handling": True,
    "specify_target_noise": True,
    "noise_final_samples": 100,
}

bads = BADS(
    noisy_sphere_estimated_noise, x0, lb, ub, plb, pub, options=options
)
optimize_result = bads.optimize()

x_min = optimize_result["x"]
fval = optimize_result["fval"]
fsd = optimize_result["fsd"]

print(
    f"BADS minimum at: x_min = {x_min.flatten()}, fval (estimated) = {fval:.4g} +/- {fsd:.2g}"
)
print(
    f"total f-count: {optimize_result['func_count']}, time: {round(optimize_result['total_time'], 2)} s"
)
print(f"final evaluations (shape): {optimize_result['yval_vec'].shape}")
print(f"final evaluations SD (shape): {optimize_result['ysd_vec'].shape}")

print(
    f"The true, noiseless value of f(x_min) is {noisy_sphere_estimated_noise(x_min, scale=0)[0][0]:.3g}."
)
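A sketch, not part of the commit, of one way a target can produce its own noise estimate: average a few noisy inner evaluations and report the standard error of the mean as the per-point noise. The function averaged_target and n_inner are illustrative; only the (value, noise_sd) return convention and the specify_target_noise options used in the example above are assumed.

def averaged_target(x, n_inner=10):
    """Average n_inner noisy evaluations and return (mean, estimated SD of the mean)."""
    x_2d = np.atleast_2d(x)
    f_true = np.sum(x_2d**2, axis=1)  # same quadratic as the example above
    draws = f_true + np.random.normal(size=(n_inner, x_2d.shape[0]))
    y = draws.mean(axis=0)  # averaged noisy value
    sigma = draws.std(axis=0, ddof=1) / np.sqrt(n_inner)  # standard error of the mean
    return y, sigma


# Passed to BADS exactly like noisy_sphere_estimated_noise above:
# bads = BADS(averaged_target, x0, lb, ub, plb, pub, options=options)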
@@ -0,0 +1,47 @@
# PyBADS Example 3: Noisy objective function
# (code only - see Jupyter notebook for a tutorial)

import numpy as np

from pybads import BADS


def noisy_sphere(x, sigma=1.0):
    """Simple quadratic function with added noise."""
    x_2d = np.atleast_2d(x)
    f = np.sum(x_2d**2, axis=1)
    noise = sigma * np.random.normal(size=x_2d.shape[0])
    return f + noise


x0 = np.array([-3, -3])  # Starting point
lb = np.array([-5, -5])  # Lower bounds
ub = np.array([5, 5])  # Upper bounds
plb = np.array([-2, -2])  # Plausible lower bounds
pub = np.array([2, 2])  # Plausible upper bounds

options = {
    "uncertainty_handling": True,
    "max_fun_evals": 300,
    "noise_final_samples": 100,
}

bads = BADS(noisy_sphere, x0, lb, ub, plb, pub, options=options)
optimize_result = bads.optimize()

x_min = optimize_result["x"]
fval = optimize_result["fval"]
fsd = optimize_result["fsd"]

print(
    f"BADS minimum at: x_min = {x_min.flatten()}, fval (estimated) = {fval:.4g} +/- {fsd:.2g}"
)
print(
    f"total f-count: {optimize_result['func_count']}, time: {round(optimize_result['total_time'], 2)} s"
)
print(f"final evaluations (shape): {optimize_result['yval_vec'].shape}")

print(
    f"The true, noiseless value of f(x_min) is {noisy_sphere(x_min, sigma=0)[0]:.3g}."
)
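A sketch, not in the commit, of cross-checking the estimated minimum value by averaging fresh noisy evaluations of the objective at the returned point. It reuses noisy_sphere, x_min, fval, and fsd from the example above; the Monte Carlo sample size n_check is an arbitrary illustrative choice.

n_check = 1000
samples = np.array([noisy_sphere(x_min)[0] for _ in range(n_check)])
print(
    f"Monte Carlo mean at x_min: {samples.mean():.4g} "
    f"+/- {samples.std(ddof=1) / np.sqrt(n_check):.2g}"
)
print(f"BADS estimate: {fval:.4g} +/- {fsd:.2g}")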
@@ -0,0 +1,42 @@
# PyBADS Example 2: Non-box constraints
# (code only - see Jupyter notebook for the tutorial)

import numpy as np

from pybads import BADS


def rosenbrocks_fcn(x):
    """Rosenbrock's 'banana' function in any dimension."""
    x_2d = np.atleast_2d(x)
    return np.sum(
        100 * (x_2d[:, 0:-1] ** 2 - x_2d[:, 1:]) ** 2
        + (x_2d[:, 0:-1] - 1) ** 2,
        axis=1,
    )


x0 = np.array([0, 0])  # Starting point
lb = np.array([-1, -1])  # Lower bounds
ub = np.array([1, 1])  # Upper bounds


def circle_constr(x):
    """Return constraint violations for points outside the unit circle."""
    x_2d = np.atleast_2d(x)
    # Note that non_box_cons assumes the function takes a 2D input
    return np.sum(x_2d**2, axis=1) > 1


bads = BADS(rosenbrocks_fcn, x0, lb, ub, non_box_cons=circle_constr)
optimize_result = bads.optimize()

x_min = optimize_result["x"]
fval = optimize_result["fval"]

print(f"BADS minimum at: x_min = {x_min.flatten()}, fval = {fval:.4g}")
print(
    f"total f-count: {optimize_result['func_count']}, time: {round(optimize_result['total_time'], 2)} s"
)
print(f"Problem type: {optimize_result['problem_type']}")
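A sketch, not part of the commit, of an alternative non-box constraint that follows the same convention as circle_constr above: it receives a 2D array of points and returns, per row, True where the constraint is violated. The name ellipse_constr and the ellipse radii are illustrative.

def ellipse_constr(x):
    """True for points outside the ellipse (x1 / 1.0)**2 + (x2 / 0.5)**2 <= 1."""
    x_2d = np.atleast_2d(x)  # non_box_cons receives a 2D array of points
    return (x_2d[:, 0] / 1.0) ** 2 + (x_2d[:, 1] / 0.5) ** 2 > 1


# Passed in the same way as circle_constr in the example above:
# bads = BADS(rosenbrocks_fcn, x0, lb, ub, non_box_cons=ellipse_constr)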

0 commit comments
