update: test cases
kozistr committed Dec 21, 2024
1 parent 0723785 commit f78be4b
Showing 3 changed files with 7 additions and 2 deletions.
5 changes: 5 additions & 0 deletions tests/constants.py
@@ -8,6 +8,7 @@
     FTRL,
     LARS,
     MADGRAD,
+    MARS,
     MSVAG,
     OPTIMIZERS,
     PID,
@@ -151,6 +152,7 @@
     'muon',
     'laprop',
     'apollo',
+    'mars',
 ]
 
 VALID_LR_SCHEDULER_NAMES: List[str] = [
@@ -532,6 +534,9 @@
         },
         15,
     ),
+    (MARS, {'lr': 5e-1, 'lr_1d': 5e-1, 'weight_decay': 1e-3, 'mars_type': 'adamw'}, 5),
+    (MARS, {'lr': 1e-1, 'weight_decay': 1e-3, 'mars_type': 'lion', 'optimize_1d': True}, 5),
+    (MARS, {'lr': 5e-1, 'lr_1d': 5e-1, 'weight_decay': 1e-3, 'mars_type': 'shampoo'}, 5),
 ]
 ADANORM_SUPPORTED_OPTIMIZERS: List[Tuple[Any, Dict[str, Union[float, bool, int]], int]] = [
     (AdaBelief, {'lr': 5e-1, 'weight_decay': 1e-3, 'adanorm': True}, 10),
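For context, these tuples follow the suite's (optimizer, config, iteration count) pattern, with the config dict forwarded as keyword arguments to the optimizer constructor. A minimal sketch of exercising the first new MARS case by hand; the toy model and training loop are illustrative assumptions, and only the keyword arguments come from the tuple itself:

```python
import torch

from pytorch_optimizer import MARS

# Toy regression problem; model and data are illustrative only.
model = torch.nn.Linear(2, 1)
x, y = torch.randn(8, 2), torch.randn(8, 1)

# Keyword arguments taken verbatim from the first new test tuple.
optimizer = MARS(model.parameters(), lr=5e-1, lr_1d=5e-1, weight_decay=1e-3, mars_type='adamw')

for _ in range(5):  # the trailing 5 in the tuple is the iteration budget
    optimizer.zero_grad()
    loss = torch.nn.functional.mse_loss(model(x), y)
    loss.backward()
    optimizer.step()
```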
2 changes: 1 addition & 1 deletion tests/test_load_modules.py
@@ -34,7 +34,7 @@ def test_load_lr_scheduler_invalid(invalid_lr_scheduler_names):
 
 
 def test_get_supported_optimizers():
-    assert len(get_supported_optimizers()) == 83
+    assert len(get_supported_optimizers()) == 84
     assert len(get_supported_optimizers('adam*')) == 7
     assert len(get_supported_optimizers(['adam*', 'ranger*'])) == 9
 
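The count moves from 83 to 84 because MARS now registers alongside the other optimizers. A quick interactive check mirroring the assertions, assuming the same top-level export the test uses:

```python
from pytorch_optimizer import get_supported_optimizers

print(len(get_supported_optimizers()))                      # 84 after this commit
print(len(get_supported_optimizers('adam*')))               # 7, wildcard filter by name
print(len(get_supported_optimizers(['adam*', 'ranger*'])))  # 9, multiple patterns
```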
2 changes: 1 addition & 1 deletion tests/test_optimizers.py
@@ -21,7 +21,7 @@
 )
 from pytorch_optimizer.optimizer.alig import l2_projection
 from pytorch_optimizer.optimizer.grokfast import gradfilter_ema, gradfilter_ma
-from pytorch_optimizer.optimizer.muon import zero_power_via_newton_schulz_5
+from pytorch_optimizer.optimizer.shampoo_utils import zero_power_via_newton_schulz_5
 from tests.constants import (
     ADAMD_SUPPORTED_OPTIMIZERS,
     ADANORM_SUPPORTED_OPTIMIZERS,
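The relocated helper, zero_power_via_newton_schulz_5, is the quintic Newton-Schulz iteration that Muon-style optimizers use to push a gradient matrix towards the nearest (semi-)orthogonal matrix; only its import path changes here. A standalone sketch of that iteration, using the widely cited Muon coefficients rather than the library's exact implementation or signature:

```python
import torch


def newton_schulz_orthogonalize(g: torch.Tensor, steps: int = 5, eps: float = 1e-7) -> torch.Tensor:
    """Approximately map g onto U @ V.T from its SVD (illustrative re-implementation)."""
    a, b, c = 3.4445, -4.7750, 2.0315        # quintic coefficients popularized by Muon
    x = g / (g.norm() + eps)                 # Frobenius scaling keeps the spectral norm <= 1
    transposed = g.size(0) > g.size(1)
    if transposed:
        x = x.T
    for _ in range(steps):
        s = x @ x.T
        x = a * x + (b * s + c * s @ s) @ x  # quintic polynomial step in x
    return x.T if transposed else x


g = torch.randn(16, 32)
o = newton_schulz_orthogonalize(g)
print(torch.dist(o @ o.T, torch.eye(16)))    # rows are approximately orthonormal
```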
