Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

CyclicConfigSuggestor #855

Merged
merged 18 commits into from
Nov 13, 2024
Merged
Show file tree
Hide file tree
Changes from 11 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
11 changes: 11 additions & 0 deletions mlos_bench/mlos_bench/config/optimizers/manual_opt.jsonc
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
// Manual optimizer to run fixed set of tunable values on repeat via the benchmarking framework.
{
"$schema": "https://raw.githubusercontent.com/microsoft/MLOS/main/mlos_bench/mlos_bench/config/schemas/optimizers/optimizer-schema.json",

"class": "mlos_bench.optimizers.ManualOptimizer",

"config": {
"max_cycles": 30,
"tunable_values_cycle": []
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -55,6 +55,8 @@
"description": "The name of the optimizer class to use.",
"$comment": "required",
"enum": [
"mlos_bench.optimizers.ManualOptimizer",
"mlos_bench.optimizers.manual_optimizer.ManualOptimizer",
"mlos_bench.optimizers.MlosCoreOptimizer",
"mlos_bench.optimizers.mlos_core_optimizer.MlosCoreOptimizer",
"mlos_bench.optimizers.GridSearchOptimizer",
Expand Down Expand Up @@ -201,6 +203,51 @@
}
},
"else": false
},

{
"$comment": "extensions to the 'config' object properties when the manual optimizer is being used",
"if": {
"properties": {
"class": {
"enum": [
"mlos_bench.optimizers.ManualOptimizer",
"mlos_bench.optimizers.manual_optimizer.ManualOptimizer"
]
}
},
"required": ["class"]
},
"then": {
"properties": {
"config": {
"type": "object",
"allOf": [
{ "$ref": "#/$defs/config_base_optimizer" },
{
"type": "object",
"properties": {
"max_cycles": {
"description": "The maximum number of cycles of tunable values to run the optimizer for.",
"type": "integer",
"minimum": 1
},
"tunable_values_cycle": {
"description": "The tunable values to cycle through.",
"type": "array",
"items": {
"$ref": "../tunables/tunable-values-schema.json#/$defs/tunable_values_set"
}
}
}
}
],
"$comment": "disallow other properties",
"unevaluatedProperties": false
}
}
},
"else": false
bpkroth marked this conversation as resolved.
Show resolved Hide resolved
}
],
"unevaluatedProperties": false
Expand Down
2 changes: 2 additions & 0 deletions mlos_bench/mlos_bench/optimizers/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,12 +5,14 @@
"""Interfaces and wrapper classes for optimizers to be used in Autotune."""

from mlos_bench.optimizers.base_optimizer import Optimizer
from mlos_bench.optimizers.manual_optimizer import ManualOptimizer
from mlos_bench.optimizers.mlos_core_optimizer import MlosCoreOptimizer
from mlos_bench.optimizers.mock_optimizer import MockOptimizer
from mlos_bench.optimizers.one_shot_optimizer import OneShotOptimizer

__all__ = [
"Optimizer",
"ManualOptimizer",
"MockOptimizer",
"OneShotOptimizer",
"MlosCoreOptimizer",
Expand Down
51 changes: 51 additions & 0 deletions mlos_bench/mlos_bench/optimizers/manual_optimizer.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,51 @@
#
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
#
"""Optimizer for mlos_bench that proposes an explicit sequence of configuration."""
motus marked this conversation as resolved.
Show resolved Hide resolved

import logging
from typing import Dict, List, Optional

from mlos_bench.optimizers.mock_optimizer import MockOptimizer
from mlos_bench.services.base_service import Service
from mlos_bench.tunables.tunable import TunableValue
from mlos_bench.tunables.tunable_groups import TunableGroups

_LOG = logging.getLogger(__name__)


class ManualOptimizer(MockOptimizer):
    """Optimizer that proposes an explicit sequence of tunable values."""

    def __init__(
        self,
        tunables: TunableGroups,
        config: dict,
        global_config: Optional[dict] = None,
        service: Optional[Service] = None,
    ):
        """
        Create a new manual optimizer that cycles through a fixed list of configs.

        Parameters
        ----------
        tunables : TunableGroups
            The tunables to optimize (used for default values if no cycle is given).
        config : dict
            Optimizer configuration; recognizes `tunable_values_cycle` (a list of
            tunable-value dicts to cycle through) and `max_cycles` (how many full
            passes through that list to make, default 1).
        global_config : Optional[dict]
            Global configuration parameters, passed through to the base optimizer.
        service : Optional[Service]
            Optional service object, passed through to the base optimizer.
        """
        super().__init__(tunables, config, global_config, service)
        cycle: List[Dict[str, TunableValue]] = config.get("tunable_values_cycle", [])
        if not cycle:
            # Fall back to a single entry with the tunables' current values.
            _LOG.warning("No tunable_values_cycle provided, using default values.")
            cycle = [tunables.get_param_values()]
        self._tunable_values_cycle = cycle
        max_cycles = int(config.get("max_cycles", 1))
        # Never exceed max_cycles full passes through the cycle, even if the
        # base optimizer's max_suggestions budget would allow more.
        self._max_suggestions = min(self._max_suggestions, max_cycles * len(cycle))

    def suggest(self) -> TunableGroups:
        """Always produce the same sequence of explicit suggestions, in a cycle."""
        tunables = super().suggest()
        # self._iter was already advanced by the base class, hence the -1.
        next_values = self._tunable_values_cycle[(self._iter - 1) % len(self._tunable_values_cycle)]
        tunables.assign(next_values)
        _LOG.info("Iteration %d :: Suggest: %s", self._iter, tunables)
        return tunables

    @property
    def supports_preload(self) -> bool:
        # Suggestions are fixed in advance; preloading prior data is meaningless.
        return False
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
{

"class": "mlos_bench.optimizers.ManualOptimizer",

"config": {
// max_cycles should be at least 1
"max_cycles": 0,
"tunable_values_cycle": []
}
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
{

"class": "mlos_bench.optimizers.ManualOptimizer",

"config": {}
}
motus marked this conversation as resolved.
Show resolved Hide resolved
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
{
"class": "mlos_bench.optimizers.ManualOptimizer",

"config": {
"tunable_values_cycle": [],
"extra_param": "should not be here"
}
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
{
"class": "mlos_bench.optimizers.ManualOptimizer",

"config": {
// Here we do our best to list the exhaustive set of full configs available for the base optimizer config.
"optimization_targets": {"score": "min"},
"max_suggestions": 20,
"seed": 12345,
"start_with_defaults": false,
"max_cycles": 10,
"tunable_values_cycle": [
{
"param1": "value1",
"param2": 1,
"param3": false
},
{
"param1": "value2",
"param2": 2,
"param3": true
},
]
}
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
{
"class": "mlos_bench.optimizers.ManualOptimizer"
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
{

"class": "mlos_bench.optimizers.ManualOptimizer",
"config": {
"tunable_values_cycle": []
}
}
16 changes: 16 additions & 0 deletions mlos_bench/mlos_bench/tests/optimizers/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,11 +8,14 @@

import pytest

from mlos_bench.optimizers.manual_optimizer import ManualOptimizer
from mlos_bench.optimizers.mlos_core_optimizer import MlosCoreOptimizer
from mlos_bench.optimizers.mock_optimizer import MockOptimizer
from mlos_bench.tests import SEED
from mlos_bench.tunables.tunable_groups import TunableGroups

# pylint: disable=redefined-outer-name


@pytest.fixture
def mock_configs() -> List[dict]:
Expand Down Expand Up @@ -154,3 +157,16 @@ def smac_opt_max(tunable_groups: TunableGroups) -> MlosCoreOptimizer:
"max_ratio": 1.0,
},
)


@pytest.fixture
def manual_opt(tunable_groups: TunableGroups, mock_configs: List[dict]) -> ManualOptimizer:
    """Test fixture for ManualOptimizer."""
    config = {
        "max_cycles": 2,
        "tunable_values_cycle": mock_configs,
    }
    return ManualOptimizer(tunables=tunable_groups, service=None, config=config)
19 changes: 19 additions & 0 deletions mlos_bench/mlos_bench/tests/optimizers/manual_opt_test.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
#
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
#
"""Unit tests for mock mlos_bench optimizer."""

from mlos_bench.environments.status import Status
from mlos_bench.optimizers.manual_optimizer import ManualOptimizer

# pylint: disable=redefined-outer-name


def test_manual_optimizer(manual_opt: ManualOptimizer, mock_configs: list) -> None:
    """Make sure that manual optimizer produces consistent suggestions."""
    num_configs = len(mock_configs)
    # Two full passes through the cycle: suggestions must repeat in order.
    for i in range(2 * num_configs):
        tunables = manual_opt.suggest()
        assert tunables.get_param_values() == mock_configs[i % num_configs]
        manual_opt.register(tunables, Status.SUCCEEDED, {"score": 123.0})
Loading