Merged
Changes from all commits
35 commits
e63f621
start migration to configspace 1.0
motus Jul 16, 2024
89fa914
migrate a few more calls to 1.0 API
motus Jul 16, 2024
78186fd
proper parameters for NormalIntegerHyperparameter constructor
motus Jul 16, 2024
1bab63e
ConfigSpace.Configuration 1.0 validates even inactive parameters - ne…
motus Jul 17, 2024
707d995
cosmetic fixes
motus Jul 17, 2024
148b64c
better fixture for ConfigurationSpace (1.0 compatible)
motus Jul 17, 2024
9478b49
Merge branch 'main' of https://github.com/microsoft/MLOS into sergiym…
motus Jul 17, 2024
3edd6c1
black formatting
motus Jul 17, 2024
341a5ea
remove quantization parameters
motus Jul 17, 2024
03970b6
use check_valid_configuration() instead of check_configuration()
motus Jul 17, 2024
387f516
Merge branch 'main' into sergiym/opt/configspace-1.0
bpkroth Jul 22, 2024
2a5b2d2
Merge branch 'main' of https://github.com/microsoft/MLOS into sergiym…
motus Jul 25, 2024
cf2ad53
update to SMAC3 v 2.2
motus Jul 25, 2024
18e42a2
use NotSet value for CategoricalHyperparameter default_value
motus Jul 25, 2024
6413f8d
mypy fixes
motus Jul 25, 2024
8939e3f
Merge branch 'main' into sergiym/opt/configspace-1.0
motus Jul 29, 2024
1f8711c
implement drop_nulls
motus Jul 29, 2024
750834d
black formatting fixes
motus Jul 29, 2024
4588f7d
isort fixes
motus Jul 29, 2024
2ecdb12
mypy fixes
motus Jul 29, 2024
1fab14d
more mypy fixes
motus Jul 30, 2024
95f127e
final(?) mypy fixes
motus Jul 30, 2024
b3731cf
remove pylint and mypy workarounds for ConfigSpace
motus Jul 30, 2024
22c5589
debugging `make doc` failure
bpkroth Jul 31, 2024
0727684
ignore a deprecation warning context message generated by asyncssh
bpkroth Jul 31, 2024
33afe04
Quantization support tracking comments
bpkroth Jul 31, 2024
42b0c7f
formatting
bpkroth Jul 31, 2024
852f685
Update mlos_core/setup.py
bpkroth Jul 31, 2024
96ad677
remove extra to_vector call
motus Jul 31, 2024
fa3f7a5
restrict matplotlib version to < 2.9
motus Jul 31, 2024
f75c8d2
bugfix: incorrect version of matplotlib
motus Aug 1, 2024
34d8c57
Merge branch 'main' of https://github.com/microsoft/MLOS into sergiym…
motus Aug 1, 2024
903e3bf
Merge branch 'main' into sergiym/opt/configspace-1.0
motus Aug 1, 2024
0b334d2
Merge branch 'main' of https://github.com/microsoft/MLOS into sergiym…
motus Aug 2, 2024
f3c28a7
Merge branch 'sergiym/opt/configspace-1.0' of https://github.com/motu…
motus Aug 2, 2024
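
For context on what these commits migrate to: a minimal sketch of the ConfigSpace 1.0 API surface used throughout this PR. Every call below appears in the diffs that follow; the configuration space itself is illustrative, not code from MLOS.

# Sketch of the ConfigSpace 1.0 API this PR migrates to (illustrative space).
from ConfigSpace import (
    CategoricalHyperparameter,
    Configuration,
    ConfigurationSpace,
    EqualsCondition,
    Integer,
)
from ConfigSpace.types import NotSet

cs = ConfigurationSpace()
# 1.0 unifies add_hyperparameter(s)/add_condition(s) into a single add():
cs.add(
    [
        # NotSet replaces None as the "no explicit default" sentinel:
        CategoricalHyperparameter("kind", ["special", "range"], default_value=NotSet),
        Integer("cost", bounds=(0, 100), default=50),
    ]
)
cs.add(EqualsCondition(cs["cost"], cs["kind"], "range"))

# space.check_configuration(config) is gone; validate on the Configuration instead:
config = Configuration(cs, values={"kind": "range", "cost": 10})
config.check_valid_configuration()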
3 changes: 2 additions & 1 deletion conda-envs/mlos-windows.yml
@@ -16,7 +16,8 @@ dependencies:
- jupyter
- ipykernel
- nb_conda_kernels
- matplotlib
- matplotlib<3.9
- matplotlib-base<3.9
- seaborn
- pandas
- pyarrow
35 changes: 26 additions & 9 deletions mlos_bench/mlos_bench/optimizers/convert_configspace.py
@@ -7,23 +7,30 @@
"""

import logging
from typing import Dict, List, Optional, Tuple, Union
from typing import Dict, Hashable, List, Optional, Tuple, Union

from ConfigSpace import (
Beta,
BetaFloatHyperparameter,
BetaIntegerHyperparameter,
CategoricalHyperparameter,
Configuration,
ConfigurationSpace,
EqualsCondition,
Float,
Integer,
Normal,
NormalFloatHyperparameter,
NormalIntegerHyperparameter,
Uniform,
UniformFloatHyperparameter,
UniformIntegerHyperparameter,
)
from ConfigSpace.types import NotSet

from mlos_bench.tunables.tunable import Tunable, TunableValue
from mlos_bench.tunables.tunable_groups import TunableGroups
from mlos_bench.util import nullable, try_parse_val
from mlos_bench.util import try_parse_val

_LOG = logging.getLogger(__name__)

@@ -70,7 +77,9 @@ def _tunable_to_configspace(
cs : ConfigurationSpace
A ConfigurationSpace object that corresponds to the Tunable.
"""
meta = {"group": group_name, "cost": cost} # {"scaling": ""}
meta: Dict[Hashable, TunableValue] = {"cost": cost}
if group_name is not None:
meta["group"] = group_name

if tunable.type == "categorical":
return ConfigurationSpace(
@@ -101,12 +110,20 @@
elif tunable.distribution is not None:
raise TypeError(f"Invalid Distribution Type: {tunable.distribution}")

range_hp: Union[
BetaFloatHyperparameter,
BetaIntegerHyperparameter,
NormalFloatHyperparameter,
NormalIntegerHyperparameter,
UniformFloatHyperparameter,
UniformIntegerHyperparameter,
]
if tunable.type == "int":
range_hp = Integer(
name=tunable.name,
bounds=(int(tunable.range[0]), int(tunable.range[1])),
log=bool(tunable.is_log),
q=nullable(int, tunable.quantization),
# TODO: Restore quantization support (#803).
distribution=distribution,
default=(
int(tunable.default)
@@ -120,8 +137,8 @@
name=tunable.name,
bounds=tunable.range,
log=bool(tunable.is_log),
q=tunable.quantization, # type: ignore[arg-type]
distribution=distribution, # type: ignore[arg-type]
# TODO: Restore quantization support (#803).
distribution=distribution,
default=(
float(tunable.default)
if tunable.in_range(tunable.default) and tunable.default is not None
@@ -152,7 +169,7 @@ def _tunable_to_configspace(
name=special_name,
choices=tunable.special,
weights=special_weights,
default_value=tunable.default if tunable.default in tunable.special else None,
default_value=tunable.default if tunable.default in tunable.special else NotSet,
meta=meta,
),
type_name: CategoricalHyperparameter(
@@ -163,10 +180,10 @@
),
}
)
conf_space.add_condition(
conf_space.add(
EqualsCondition(conf_space[special_name], conf_space[type_name], TunableValueKind.SPECIAL)
)
conf_space.add_condition(
conf_space.add(
EqualsCondition(conf_space[tunable.name], conf_space[type_name], TunableValueKind.RANGE)
)

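As context for the convert_configspace.py changes above: ConfigSpace 1.0 drops the q= quantization argument (hence the TODO referencing #803) and takes distributions via small factory helpers. A minimal sketch of that construction style, illustrative only; the parameter names below are made up, not from MLOS:

# Illustrative only: 1.0-style Integer/Float factories with explicit distributions.
from ConfigSpace import Float, Integer, Normal, Uniform

hp_int = Integer(
    "example_int",  # hypothetical parameter name
    bounds=(0, 500000),
    log=False,
    distribution=Uniform(),
    default=250000,
)
hp_float = Float(
    "example_float",  # hypothetical parameter name
    bounds=(0.0, 1.0),
    distribution=Normal(mu=0.5, sigma=0.1),
    default=0.5,
)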
@@ -9,6 +9,7 @@
CategoricalHyperparameter,
ConfigurationSpace,
EqualsCondition,
Integer,
UniformFloatHyperparameter,
UniformIntegerHyperparameter,
)
@@ -40,45 +41,66 @@ def configuration_space() -> ConfigurationSpace:
special_param_names("kernel_sched_migration_cost_ns")
)

# TODO: Add quantization support tests (#803).

# NOTE: FLAML requires distribution to be uniform
spaces = ConfigurationSpace(
space={
"vmSize": ["Standard_B2s", "Standard_B2ms", "Standard_B4ms"],
"idle": ["halt", "mwait", "noidle"],
"kernel_sched_migration_cost_ns": (0, 500000),
kernel_sched_migration_cost_ns_special: [-1, 0],
kernel_sched_migration_cost_ns_type: [
TunableValueKind.SPECIAL,
TunableValueKind.RANGE,
],
"kernel_sched_latency_ns": (0, 1000000000),
{
"vmSize": CategoricalHyperparameter(
name="vmSize",
choices=["Standard_B2s", "Standard_B2ms", "Standard_B4ms"],
default_value="Standard_B4ms",
meta={"group": "provision", "cost": 0},
),
"idle": CategoricalHyperparameter(
name="idle",
choices=["halt", "mwait", "noidle"],
default_value="halt",
meta={"group": "boot", "cost": 0},
),
"kernel_sched_latency_ns": Integer(
name="kernel_sched_latency_ns",
bounds=(0, 1000000000),
log=False,
default=2000000,
meta={"group": "kernel", "cost": 0},
),
"kernel_sched_migration_cost_ns": Integer(
name="kernel_sched_migration_cost_ns",
bounds=(0, 500000),
log=False,
default=250000,
meta={"group": "kernel", "cost": 0},
),
kernel_sched_migration_cost_ns_special: CategoricalHyperparameter(
name=kernel_sched_migration_cost_ns_special,
choices=[-1, 0],
weights=[0.5, 0.5],
default_value=-1,
meta={"group": "kernel", "cost": 0},
),
kernel_sched_migration_cost_ns_type: CategoricalHyperparameter(
name=kernel_sched_migration_cost_ns_type,
choices=[TunableValueKind.SPECIAL, TunableValueKind.RANGE],
weights=[0.5, 0.5],
default_value=TunableValueKind.SPECIAL,
),
}
)

# NOTE: FLAML requires distribution to be uniform
spaces["vmSize"].default_value = "Standard_B4ms"
spaces["idle"].default_value = "halt"
spaces["kernel_sched_migration_cost_ns"].default_value = 250000
spaces[kernel_sched_migration_cost_ns_special].default_value = -1
spaces[kernel_sched_migration_cost_ns_special].probabilities = (0.5, 0.5)
spaces[kernel_sched_migration_cost_ns_type].default_value = TunableValueKind.SPECIAL
spaces[kernel_sched_migration_cost_ns_type].probabilities = (0.5, 0.5)
spaces["kernel_sched_latency_ns"].default_value = 2000000

spaces.add_condition(
spaces.add(
EqualsCondition(
spaces[kernel_sched_migration_cost_ns_special],
spaces[kernel_sched_migration_cost_ns_type],
TunableValueKind.SPECIAL,
)
)
spaces.add_condition(
spaces.add(
EqualsCondition(
spaces["kernel_sched_migration_cost_ns"],
spaces[kernel_sched_migration_cost_ns_type],
TunableValueKind.RANGE,
)
)

return spaces


@@ -23,6 +23,7 @@
)
from mlos_core.spaces.adapters.adapter import BaseSpaceAdapter
from mlos_core.spaces.adapters.identity_adapter import IdentityAdapter
from mlos_core.util import drop_nulls


class SmacOptimizer(BaseBayesianOptimizer):
@@ -350,8 +351,11 @@ def _suggest(
warn(f"Not Implemented: Ignoring context {list(context.columns)}", UserWarning)

trial: TrialInfo = self.base_optimizer.ask()
trial.config.is_valid_configuration()
self.optimizer_parameter_space.check_configuration(trial.config)
trial.config.check_valid_configuration()
ConfigSpace.Configuration(
self.optimizer_parameter_space,
values=trial.config,
).check_valid_configuration()
assert trial.config.config_space == self.optimizer_parameter_space
self.trial_info_map[trial.config] = trial
config_df = pd.DataFrame(
@@ -441,6 +445,11 @@ def _to_configspace_configs(self, *, configs: pd.DataFrame) -> List[ConfigSpace.Configuration]:
List of ConfigSpace configs.
"""
return [
ConfigSpace.Configuration(self.optimizer_parameter_space, values=config.to_dict())
ConfigSpace.Configuration(
self.optimizer_parameter_space,
# Remove None values for inactive parameters
values=drop_nulls(config.to_dict()),
allow_inactive_with_values=False,
)
for (_, config) in configs.astype("O").iterrows()
]
9 changes: 6 additions & 3 deletions mlos_core/mlos_core/optimizers/flaml_optimizer.py
@@ -13,7 +13,7 @@

from mlos_core.optimizers.optimizer import BaseOptimizer
from mlos_core.spaces.adapters.adapter import BaseSpaceAdapter
from mlos_core.util import normalize_config
from mlos_core.util import drop_nulls, normalize_config


class EvaluatedSample(NamedTuple):
@@ -124,13 +124,16 @@ def _register(
warn(f"Not Implemented: Ignoring metadata {list(metadata.columns)}", UserWarning)

for (_, config), (_, score) in zip(configs.astype("O").iterrows(), scores.iterrows()):
# Remove None values for inactive config parameters
config_dict = drop_nulls(config.to_dict())
cs_config: ConfigSpace.Configuration = ConfigSpace.Configuration(
self.optimizer_parameter_space, values=config.to_dict()
self.optimizer_parameter_space,
values=config_dict,
)
if cs_config in self.evaluated_samples:
warn(f"Configuration {config} was already registered", UserWarning)
self.evaluated_samples[cs_config] = EvaluatedSample(
config=config.to_dict(),
config=config_dict,
score=float(np.average(score.astype(float), weights=self._objective_weights)),
)

37 changes: 20 additions & 17 deletions mlos_core/mlos_core/spaces/adapters/llamatune.py
@@ -4,7 +4,7 @@
#
"""Implementation of LlamaTune space adapter."""
import os
from typing import Dict, Optional
from typing import Dict, List, Optional, Union
from warnings import warn

import ConfigSpace
@@ -16,7 +16,7 @@
from sklearn.preprocessing import MinMaxScaler

from mlos_core.spaces.adapters.adapter import BaseSpaceAdapter
from mlos_core.util import normalize_config
from mlos_core.util import drop_nulls, normalize_config


class LlamaTuneAdapter(BaseSpaceAdapter): # pylint: disable=too-many-instance-attributes
@@ -102,7 +102,7 @@ def inverse_transform(self, configurations: pd.DataFrame) -> pd.DataFrame:
for _, config in configurations.astype("O").iterrows():
configuration = ConfigSpace.Configuration(
self.orig_parameter_space,
values=config.to_dict(),
values=drop_nulls(config.to_dict()),
)

target_config = self._suggested_configs.get(configuration, None)
@@ -222,7 +222,10 @@ def _try_inverse_transform_config(

# But the inverse mapping should at least be valid in the target space.
try:
self.target_parameter_space.check_configuration(target_config)
ConfigSpace.Configuration(
self.target_parameter_space,
values=target_config,
).check_valid_configuration()
except ConfigSpace.exceptions.IllegalValueError as e:
raise ValueError(
f"Invalid configuration {target_config} generated by "
@@ -249,7 +252,10 @@ def transform(self, configuration: pd.DataFrame) -> pd.DataFrame:

# Validate that the configuration is in the original space.
try:
self.orig_parameter_space.check_configuration(orig_configuration)
ConfigSpace.Configuration(
self.orig_parameter_space,
values=orig_configuration,
).check_valid_configuration()
except ConfigSpace.exceptions.IllegalValueError as e:
raise ValueError(
f"Invalid configuration {orig_configuration} generated by "
@@ -282,6 +288,9 @@
"""
# Define target space parameters
q_scaler = None
hyperparameters: List[
Union[ConfigSpace.UniformFloatHyperparameter, ConfigSpace.UniformIntegerHyperparameter]
]
if max_unique_values_per_param is None:
hyperparameters = [
ConfigSpace.UniformFloatHyperparameter(name=f"dim_{idx}", lower=-1, upper=1)
@@ -316,7 +325,7 @@
config_space = ConfigSpace.ConfigurationSpace(name=self.orig_parameter_space.name)
# use same random state as in original parameter space
config_space.random = self._random_state
config_space.add_hyperparameters(hyperparameters)
config_space.add(hyperparameters)
self._target_config_space = config_space

def _transform(self, configuration: dict) -> dict:
@@ -366,7 +375,7 @@ def _transform(self, configuration: dict) -> dict:
if param.name in self._special_param_values_dict:
value = self._special_param_value_scaler(param, value)

orig_value = param._transform(value) # pylint: disable=protected-access
orig_value = param.to_value(value)
orig_value = np.clip(orig_value, param.lower, param.upper)
else:
raise NotImplementedError(
@@ -379,7 +388,7 @@

def _special_param_value_scaler(
self,
param: ConfigSpace.UniformIntegerHyperparameter,
param: NumericalHyperparameter,
input_value: float,
) -> float:
"""
@@ -388,7 +397,7 @@ def _special_param_value_scaler(

Parameters
----------
param: ConfigSpace.UniformIntegerHyperparameter
param: NumericalHyperparameter
Parameter of the original parameter space.

input_value: float
@@ -403,19 +412,13 @@

# Check if input value corresponds to some special value
perc_sum = 0.0
ret: float
for special_value, biasing_perc in special_values_list:
perc_sum += biasing_perc
if input_value < perc_sum:
ret = param._inverse_transform(special_value) # pylint: disable=protected-access
return ret
return float(param.to_vector(special_value))

# Scale input value uniformly to non-special values
# pylint: disable=protected-access
ret = param._inverse_transform(
param._transform_scalar((input_value - perc_sum) / (1 - perc_sum))
)
return ret
return float(param.to_vector((input_value - perc_sum) / (1 - perc_sum)))

# pylint: disable=too-complex,too-many-branches
def _validate_special_param_values(self, special_param_values_dict: dict) -> None:
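The llamatune.py changes above replace ConfigSpace's private _transform/_inverse_transform helpers with the public 1.0 vectorized-representation API. A minimal round-trip sketch, illustrative only, with a made-up parameter:

# to_vector: value space -> normalized internal vector space;
# to_value: the inverse mapping back to the value space.
from ConfigSpace import UniformIntegerHyperparameter

param = UniformIntegerHyperparameter("example_param", lower=0, upper=100)
vec = float(param.to_vector(50))  # e.g. 0.5 for the midpoint of the range
val = param.to_value(vec)
assert val == 50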