CyclicConfigSuggestor #855

Merged: 18 commits (Nov 13, 2024)

12 changes: 12 additions & 0 deletions mlos_bench/mlos_bench/config/schedulers/cyclic_scheduler.jsonc
@@ -0,0 +1,12 @@
// Cyclic scheduler for the benchmarking framework.
{
"$schema": "https://raw.githubusercontent.com/microsoft/MLOS/main/mlos_bench/mlos_bench/config/schemas/schedulers/scheduler-schema.json",

"class": "mlos_bench.schedulers.CyclicScheduler",

"config": {
"trial_config_repeat_count": 1,
"max_trials": 60, // Limited only in the benchmarking logic/config.
"teardown": false
}
}
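
Note: the config above does not set "cycle_config_ids". Per the CyclicScheduler docstring later in this diff, the list of configuration IDs to cycle through can be supplied in the scheduler's config or overridden through the global config. A minimal hypothetical sketch (the IDs and the exact key placement are assumptions for illustration, not part of this PR):

    // Hypothetical: cycle through previously stored configurations 1, 2, and 3.
    {
        "$schema": "https://raw.githubusercontent.com/microsoft/MLOS/main/mlos_bench/mlos_bench/config/schemas/schedulers/scheduler-schema.json",
        "class": "mlos_bench.schedulers.CyclicScheduler",
        "config": {
            "max_trials": 60,
            "cycle_config_ids": [1, 2, 3] // existing config IDs to benchmark in round-robin order
        }
    }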
@@ -62,7 +62,9 @@
"$comment": "required",
"enum": [
"mlos_bench.schedulers.SyncScheduler",
"mlos_bench.schedulers.sync_scheduler.SyncScheduler"
"mlos_bench.schedulers.sync_scheduler.SyncScheduler",
"mlos_bench.schedulers.CyclicScheduler",
"mlos_bench.schedulers.cyclic_scheduler.CyclicScheduler"
]
},

@@ -83,7 +85,9 @@
"class": {
"enum": [
"mlos_bench.schedulers.SyncScheduler",
"mlos_bench.schedulers.sync_scheduler.SyncScheduler"
"mlos_bench.schedulers.sync_scheduler.SyncScheduler",
"mlos_bench.schedulers.CyclicScheduler",
"mlos_bench.schedulers.cyclic_scheduler.CyclicScheduler"
]
}
},
2 changes: 2 additions & 0 deletions mlos_bench/mlos_bench/schedulers/__init__.py
@@ -6,8 +6,10 @@

from mlos_bench.schedulers.base_scheduler import Scheduler
from mlos_bench.schedulers.sync_scheduler import SyncScheduler
from mlos_bench.schedulers.cyclic_scheduler import CyclicScheduler

__all__ = [
"Scheduler",
"SyncScheduler",
"CyclicScheduler",
]
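
With CyclicScheduler exported from the package __init__, the short dotted name registered in the schema enum above resolves from the package namespace. A quick usage sketch (assumes mlos_bench is installed; the print is only illustrative):

    from mlos_bench.schedulers import CyclicScheduler

    # The short name in configs ("mlos_bench.schedulers.CyclicScheduler")
    # refers to the same class as the full module path.
    print(CyclicScheduler.__module__)  # mlos_bench.schedulers.cyclic_scheduler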
80 changes: 80 additions & 0 deletions mlos_bench/mlos_bench/schedulers/cyclic_scheduler.py
@@ -0,0 +1,80 @@
#
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
#
"""A simple single-threaded synchronous optimization loop implementation."""
import logging
from typing import Any, Dict, List

from mlos_bench.environments.base_environment import Environment
from mlos_bench.optimizers.base_optimizer import Optimizer
from mlos_bench.schedulers.sync_scheduler import SyncScheduler
from mlos_bench.storage.base_storage import Storage
from mlos_bench.util import merge_parameters

_LOG = logging.getLogger(__name__)


class CyclicScheduler(SyncScheduler):
"""
A simple single-threaded synchronous benchmarking loop implementation
eujing marked this conversation as resolved.
Show resolved Hide resolved
that cycles through a list of provided configuration IDs.

This scheduler does not use any optimizer and is mainly used for benchmarking
specific configurations against each other only.
eujing marked this conversation as resolved.
Show resolved Hide resolved

`cycle_config_ids` is the list of existing configuration IDs to cycle through.
It can be provided via the scheduler's config, or overwritten through the global config.
eujing marked this conversation as resolved.
Show resolved Hide resolved
"""

    def __init__(  # pylint: disable=too-many-arguments
        self,
        *,
        config: Dict[str, Any],
        global_config: Dict[str, Any],
        environment: Environment,
        optimizer: Optimizer,
        storage: Storage,
        root_env_config: str,
    ):
"""
Create a new instance of the scheduler. The constructor of this and the derived
eujing marked this conversation as resolved.
Show resolved Hide resolved
classes is called by the persistence service after reading the class JSON
configuration. Other objects like the Environment and Optimizer are provided by
the Launcher.
Parameters
----------
config : dict
The configuration for the scheduler.
global_config : dict
The global configuration for the experiment.
environment : Environment
The environment to benchmark/optimize.
optimizer : Optimizer
The optimizer to use.
storage : Storage
The storage to use.
root_env_config : str
Path to the root environment configuration.
"""
        super().__init__(
            config=config,
            global_config=global_config,
            environment=environment,
            optimizer=optimizer,
            storage=storage,
            root_env_config=root_env_config,
        )
        # Per the class docstring, "cycle_config_ids" from the global config
        # takes precedence over the value in the scheduler's config.
        config = merge_parameters(
            dest=config.copy(),
            source=global_config,
            required_keys=["cycle_config_ids"],
        )
        # Default to config ID 0 if no list of IDs was provided.
        self._cycle_config_ids: List[int] = config.get("cycle_config_ids", [0])

    def _schedule_new_optimizer_suggestions(self) -> bool:
        not_done = self.not_done()
        if not_done:
            # Round-robin: pick the next ID from the cycle based on the running trial count.
            config_id = self._cycle_config_ids[self._trial_count % len(self._cycle_config_ids)]
            tunables = self.load_config(config_id)
            self.schedule_trial(tunables)
        return not_done
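
To illustrate the round-robin selection in _schedule_new_optimizer_suggestions above: each new trial takes the next ID from cycle_config_ids, indexed by the running trial count modulo the list length. A standalone sketch (plain Python, independent of the mlos_bench classes; the IDs and counts are hypothetical):

    # Round-robin over a fixed list of configuration IDs, as in CyclicScheduler.
    def next_config_id(cycle_config_ids: list, trial_count: int) -> int:
        """Return the config ID to schedule for the given trial count."""
        return cycle_config_ids[trial_count % len(cycle_config_ids)]

    ids = [7, 11, 13]  # hypothetical existing configuration IDs
    print([next_config_id(ids, t) for t in range(7)])
    # -> [7, 11, 13, 7, 11, 13, 7]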