Add tests for mixed numerical type conversions of tunable config data from storage #670

Merged 7 commits on Feb 9, 2024
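The new tests target a subtle round-trip problem: tunable values saved to SQL storage can come back as a different Python numeric type than the one the tunable declares (e.g., an int restored as a float or a string). The following is a minimal standalone sketch of the behavior under test; it is illustrative only, not code from this PR, and the dict names are made up:

# Values read back from a generic SQL value column often arrive as strings
# (or uniformly as floats), so they must be coerced back to each tunable's
# declared dtype (cf. Tunable.dtype in the new test below).
stored_params = {"int": "42", "float": "0.5"}   # as a storage backend might hand them back
declared_dtypes = {"int": int, "float": float}  # per-tunable Python types

restored = {name: declared_dtypes[name](value) for name, value in stored_params.items()}

assert isinstance(restored["int"], int)      # fails if the value came back as float or str
assert isinstance(restored["float"], float)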
6 changes: 3 additions & 3 deletions doc/nginx-docker.sh
@@ -29,9 +29,9 @@ if [ "$cmd" == 'start' ]; then
set -x
tmpdir=$(mktemp -d)
docker build --progress=plain -t mlos-doc-nginx \
--build-arg http_proxy=$http_proxy \
--build-arg https_proxy=$https_proxy \
--build-arg no_proxy=$no_proxy \
--build-arg http_proxy=${http_proxy:-} \
--build-arg https_proxy=${https_proxy:-} \
--build-arg no_proxy=${no_proxy:-} \
--build-arg NGINX_PORT=$NGINX_PORT \
-f Dockerfile "$tmpdir"
rmdir "$tmpdir"
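The switch to ${http_proxy:-} style expansion substitutes an empty string when a proxy variable is unset, so the docker build no longer fails on unbound variables (for example, under set -u with no proxy configured); this motivation is inferred from the change rather than stated in the PR.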
1 change: 1 addition & 0 deletions mlos_bench/mlos_bench/tests/conftest.py
@@ -27,6 +27,7 @@
# Expose some of those as local names so they can be picked up as fixtures by pytest.
tunable_groups_config = tunable_groups_fixtures.tunable_groups_config
tunable_groups = tunable_groups_fixtures.tunable_groups
mixed_numerics_tunable_groups = tunable_groups_fixtures.mixed_numerics_tunable_groups
covariant_group = tunable_groups_fixtures.covariant_group


3 changes: 3 additions & 0 deletions mlos_bench/mlos_bench/tests/storage/conftest.py
@@ -15,5 +15,8 @@
# Expose some of those as local names so they can be picked up as fixtures by pytest.
storage = sql_storage_fixtures.storage
exp_storage = sql_storage_fixtures.exp_storage
mixed_numerics_exp_storage = sql_storage_fixtures.mixed_numerics_exp_storage
exp_storage_with_trials = sql_storage_fixtures.exp_storage_with_trials
mixed_numerics_exp_storage_with_trials = sql_storage_fixtures.mixed_numerics_exp_storage_with_trials
exp_data = sql_storage_fixtures.exp_data
mixed_numerics_exp_data = sql_storage_fixtures.mixed_numerics_exp_data
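As with the existing fixtures above, the new mixed_numerics_* fixtures are re-exported as module-level names in conftest.py so that pytest discovers them by name and makes them available to every test in the package without explicit imports.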
67 changes: 55 additions & 12 deletions mlos_bench/mlos_bench/tests/storage/sql/fixtures.py
@@ -17,6 +17,7 @@
from mlos_bench.optimizers.mock_optimizer import MockOptimizer
from mlos_bench.tunables.tunable_groups import TunableGroups

from mlos_bench.tests import SEED
from mlos_bench.tests.storage import CONFIG_COUNT, CONFIG_TRIAL_REPEAT_COUNT

# pylint: disable=redefined-outer-name
@@ -57,24 +58,41 @@ def exp_storage(storage: SqlStorage, tunable_groups: TunableGroups) -> SqlStorag


@pytest.fixture
def exp_storage_with_trials(exp_storage: SqlStorage.Experiment) -> SqlStorage.Experiment:
def mixed_numerics_exp_storage(storage: SqlStorage, mixed_numerics_tunable_groups: TunableGroups) -> SqlStorage.Experiment:
"""
Test fixture for Experiment using in-memory SQLite3 storage.
Test fixture for an Experiment with mixed numerics tunables using in-memory SQLite3 storage.
Note: It has already entered the context upon return.
"""
opt_target = "score"
opt_direction = "min"
with storage.experiment(
experiment_id="Test-002",
trial_id=1,
root_env_config="dne.jsonc",
description="pytest experiment",
tunables=mixed_numerics_tunable_groups,
opt_target=opt_target,
opt_direction=opt_direction,
) as exp:
return exp


def _dummy_run_exp(exp: SqlStorage.Experiment, tunable_name: str) -> SqlStorage.Experiment:
"""
Generates data by doing a simulated run of the given experiment.
"""
# Add some trials to that experiment.
# Note: we're just fabricating some made up function for the ML libraries to try and learn.
base_score = 10.0
tunable_name = "kernel_sched_latency_ns"
tunable = exp_storage.tunables.get_tunable(tunable_name)[0]
tunable = exp.tunables.get_tunable(tunable_name)[0]
tunable_default = tunable.default
assert isinstance(tunable_default, int)
tunable_min = tunable.range[0]
tunable_max = tunable.range[1]
tunable_range = tunable_max - tunable_min
seed = 42
rand_seed(seed)
opt = MockOptimizer(tunables=exp_storage.tunables, config={
"seed": seed,
rand_seed(SEED)
opt = MockOptimizer(tunables=exp.tunables, config={
"seed": SEED,
# This should be the default, so we leave it omitted for now to test the default.
# But the test logic relies on this (e.g., trial 1 is config 1 is the default values for the tunable params)
# "start_with_defaults": True,
@@ -83,9 +101,9 @@ def exp_storage_with_trials(exp_storage: SqlStorage.Experiment) -> SqlStorage.Ex
for config_i in range(CONFIG_COUNT):
tunables = opt.suggest()
for repeat_j in range(CONFIG_TRIAL_REPEAT_COUNT):
trial = exp_storage.new_trial(tunables=tunables.copy(), config={
"opt_target": exp_storage.opt_target,
"opt_direction": exp_storage.opt_direction,
trial = exp.new_trial(tunables=tunables.copy(), config={
"opt_target": exp.opt_target,
"opt_direction": exp.opt_direction,
"trial_number": config_i * CONFIG_TRIAL_REPEAT_COUNT + repeat_j + 1,
})
assert trial.tunable_config_id == config_i + 1
@@ -99,7 +117,24 @@ def exp_storage_with_trials(exp_storage: SqlStorage.Experiment) -> SqlStorage.Ex
# And some influence from the tunable value.
"score": tunable_value_norm + random() / 100
})
return exp_storage
return exp


@pytest.fixture
def exp_storage_with_trials(exp_storage: SqlStorage.Experiment) -> SqlStorage.Experiment:
"""
Test fixture for Experiment using in-memory SQLite3 storage.
"""
return _dummy_run_exp(exp_storage, tunable_name="kernel_sched_latency_ns")


@pytest.fixture
def mixed_numerics_exp_storage_with_trials(mixed_numerics_exp_storage: SqlStorage.Experiment) -> SqlStorage.Experiment:
"""
Test fixture for Experiment using in-memory SQLite3 storage.
"""
tunable = next(iter(mixed_numerics_exp_storage.tunables))[0]
return _dummy_run_exp(mixed_numerics_exp_storage, tunable_name=tunable.name)


@pytest.fixture
@@ -108,3 +143,11 @@ def exp_data(storage: SqlStorage, exp_storage_with_trials: SqlStorage.Experiment
Test fixture for ExperimentData.
"""
return storage.experiments[exp_storage_with_trials.experiment_id]


@pytest.fixture
def mixed_numerics_exp_data(storage: SqlStorage, mixed_numerics_exp_storage_with_trials: SqlStorage.Experiment) -> ExperimentData:
"""
Test fixture for ExperimentData with mixed numerical tunable types.
"""
return storage.experiments[mixed_numerics_exp_storage_with_trials.experiment_id]
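For context, a hedged usage sketch (not part of this PR) of how the new fixtures chain together: storage feeds mixed_numerics_exp_storage, which _dummy_run_exp populates for mixed_numerics_exp_storage_with_trials, which in turn backs mixed_numerics_exp_data. Assuming ExperimentData.trials holds one entry per trial, the generated count follows directly from the constants used above:

from mlos_bench.tests.storage import CONFIG_COUNT, CONFIG_TRIAL_REPEAT_COUNT


def test_mixed_numerics_trial_count(mixed_numerics_exp_data) -> None:
    # Hypothetical test: _dummy_run_exp suggests CONFIG_COUNT configs and
    # repeats each one CONFIG_TRIAL_REPEAT_COUNT times.
    assert len(mixed_numerics_exp_data.trials) == CONFIG_COUNT * CONFIG_TRIAL_REPEAT_COUNT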
13 changes: 13 additions & 0 deletions mlos_bench/mlos_bench/tests/storage/tunable_config_data_test.py
@@ -23,3 +23,16 @@ def test_trial_data_tunable_config_data(exp_data: ExperimentData,
# The first should be the defaults.
assert tunable_config.config_dict == tunable_groups.get_param_values()
assert trial.tunable_config_trial_group.tunable_config == tunable_config


def test_mixed_numerics_exp_trial_data(
mixed_numerics_exp_data: ExperimentData,
mixed_numerics_tunable_groups: TunableGroups) -> None:
"""
Tests that data type conversions are retained when loading experiment data with
mixed numeric tunable types.
"""
trial = next(iter(mixed_numerics_exp_data.trials.values()))
config = trial.tunable_config.config_dict
for (tunable, _group) in mixed_numerics_tunable_groups:
assert isinstance(config[tunable.name], tunable.dtype)
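If the storage layer handed every numeric back as a float or a string, which is a common outcome when values round-trip through a generic SQL value column, the isinstance check would fail for the int tunable; that is the regression this test is meant to catch.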
33 changes: 33 additions & 0 deletions mlos_bench/mlos_bench/tests/tunable_groups_fixtures.py
@@ -108,3 +108,36 @@ def covariant_group(tunable_groups: TunableGroups) -> CovariantTunableGroup:
"""
(_, covariant_group) = next(iter(tunable_groups))
return covariant_group


@pytest.fixture
def mixed_numerics_tunable_groups() -> TunableGroups:
"""
A test fixture with mixed numeric tunable groups to test type conversions.

Returns
-------
tunable_groups : TunableGroups
A new TunableGroups object for testing.
"""
tunables = TunableGroups({
"mix-numerics": {
"cost": 1,
"params": {
"int": {
"description": "An integer",
"type": "int",
"default": 0,
"range": [0, 100],
},
"float": {
"description": "A float",
"type": "float",
"default": 0,
"range": [0, 1],
},
}
},
})
tunables.reset()
return tunables
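Note that the float tunable's default and range are written with integer literals (0 and [0, 1]); presumably this is intentional, so that restoring the parameter as a genuine float rather than an echoed int is exactly what the new storage test exercises.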
2 changes: 1 addition & 1 deletion mlos_core/mlos_core/tests/optimizers/optimizer_test.py
@@ -307,7 +307,7 @@ def test_optimizer_type_defs(optimizer_class: Type[BaseOptimizer]) -> None:
*[(member, {}) for member in OptimizerType],
# Optimizer with non-empty kwargs argument
])
def test_mixed_numeric_type_input_space_types(optimizer_type: Optional[OptimizerType], kwargs: Optional[dict]) -> None:
def test_mixed_numerics_type_input_space_types(optimizer_type: Optional[OptimizerType], kwargs: Optional[dict]) -> None:
"""
Toy problem to test the optimizers with mixed numeric types to ensure that original dtypes are retained.
"""