diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 7d66c8d1..656db586 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -69,7 +69,7 @@ nbdev_export
If you're working on the local interface you can just use `nbdev_test --n_workers 1 --do_print --timing`.
### Cleaning notebooks
-Since the notebooks output cells can vary from run to run (even if they produce the same outputs) the notebooks are cleaned before committing them. Please make sure to run `nbdev_clean --clear_all` before committing your changes. If you clean the library's notebooks with this command please backtrack the changes you make to the example notebooks `git checkout nbs/examples`, unless you intend to change the examples.
+Since the notebooks' output cells can vary from run to run (even if they produce the same outputs), the notebooks are cleaned before committing them. Please make sure to run `nbdev_clean --clear_all` before committing your changes. If you clean the library's notebooks with this command, please revert the changes made to the example notebooks with `git checkout nbs/docs`, unless you intend to change the examples.
## Do you want to contribute to the documentation?
@@ -78,6 +78,6 @@ Since the notebooks output cells can vary from run to run (even if they produce
1. Find the relevant notebook.
2. Make your changes.
3. Run all cells.
- 4. If you are modifying library notebooks (not in `nbs/examples`), clean all outputs using `Edit > Clear All Outputs`.
+ 4. If you are modifying library notebooks (not in `nbs/docs`), clean all outputs using `Edit > Clear All Outputs`.
5. Run `nbdev_preview`.
6. Clean the notebook metadata using `nbdev_clean`.
diff --git a/nbs/common.base_model.ipynb b/nbs/common.base_model.ipynb
index 97d2f1ba..4ee3ff9d 100644
--- a/nbs/common.base_model.ipynb
+++ b/nbs/common.base_model.ipynb
@@ -34,7 +34,6 @@
"import random\n",
"import warnings\n",
"from contextlib import contextmanager\n",
- "from copy import deepcopy\n",
"from dataclasses import dataclass\n",
"\n",
"import fsspec\n",
@@ -121,15 +120,12 @@
" random_seed,\n",
" loss,\n",
" valid_loss,\n",
- " optimizer,\n",
- " optimizer_kwargs,\n",
- " lr_scheduler,\n",
- " lr_scheduler_kwargs,\n",
" futr_exog_list,\n",
" hist_exog_list,\n",
" stat_exog_list,\n",
" max_steps,\n",
" early_stop_patience_steps,\n",
+ " config_optimizers=None,\n",
" **trainer_kwargs,\n",
" ):\n",
" super().__init__()\n",
@@ -150,18 +146,8 @@
" self.train_trajectories = []\n",
" self.valid_trajectories = []\n",
"\n",
- " # Optimization\n",
- " if optimizer is not None and not issubclass(optimizer, torch.optim.Optimizer):\n",
- " raise TypeError(\"optimizer is not a valid subclass of torch.optim.Optimizer\")\n",
- " self.optimizer = optimizer\n",
- " self.optimizer_kwargs = optimizer_kwargs if optimizer_kwargs is not None else {}\n",
- "\n",
- " # lr scheduler\n",
- " if lr_scheduler is not None and not issubclass(lr_scheduler, torch.optim.lr_scheduler.LRScheduler):\n",
- " raise TypeError(\"lr_scheduler is not a valid subclass of torch.optim.lr_scheduler.LRScheduler\")\n",
- " self.lr_scheduler = lr_scheduler\n",
- " self.lr_scheduler_kwargs = lr_scheduler_kwargs if lr_scheduler_kwargs is not None else {}\n",
- "\n",
+ " # function has the same signature as LightningModule's configure_optimizer\n",
+ " self.config_optimizers = config_optimizers\n",
"\n",
" # Variables\n",
" self.futr_exog_list = list(futr_exog_list) if futr_exog_list is not None else []\n",
@@ -399,39 +385,20 @@
" random.seed(self.random_seed)\n",
"\n",
" def configure_optimizers(self):\n",
- " if self.optimizer:\n",
- " optimizer_signature = inspect.signature(self.optimizer)\n",
- " optimizer_kwargs = deepcopy(self.optimizer_kwargs)\n",
- " if 'lr' in optimizer_signature.parameters:\n",
- " if 'lr' in optimizer_kwargs:\n",
- " warnings.warn(\"ignoring learning rate passed in optimizer_kwargs, using the model's learning rate\")\n",
- " optimizer_kwargs['lr'] = self.learning_rate\n",
- " optimizer = self.optimizer(params=self.parameters(), **optimizer_kwargs)\n",
- " else:\n",
- " if self.optimizer_kwargs:\n",
- " warnings.warn(\n",
- " \"ignoring optimizer_kwargs as the optimizer is not specified\"\n",
- " ) \n",
- " optimizer = torch.optim.Adam(self.parameters(), lr=self.learning_rate)\n",
+ " if self.config_optimizers is not None:\n",
+ " # return the customized optimizer settings if specified\n",
+ " return self.config_optimizers(self)\n",
" \n",
- " lr_scheduler = {'frequency': 1, 'interval': 'step'}\n",
- " if self.lr_scheduler:\n",
- " lr_scheduler_signature = inspect.signature(self.lr_scheduler)\n",
- " lr_scheduler_kwargs = deepcopy(self.lr_scheduler_kwargs)\n",
- " if 'optimizer' in lr_scheduler_signature.parameters:\n",
- " if 'optimizer' in lr_scheduler_kwargs:\n",
- " warnings.warn(\"ignoring optimizer passed in lr_scheduler_kwargs, using the model's optimizer\")\n",
- " del lr_scheduler_kwargs['optimizer']\n",
- " lr_scheduler['scheduler'] = self.lr_scheduler(optimizer=optimizer, **lr_scheduler_kwargs)\n",
- " else:\n",
- " if self.lr_scheduler_kwargs:\n",
- " warnings.warn(\n",
- " \"ignoring lr_scheduler_kwargs as the lr_scheduler is not specified\"\n",
- " ) \n",
- " lr_scheduler['scheduler'] = torch.optim.lr_scheduler.StepLR(\n",
+ " # default choice\n",
+ " optimizer = torch.optim.Adam(self.parameters(), lr=self.learning_rate)\n",
+ " scheduler = {\n",
+ " \"scheduler\": torch.optim.lr_scheduler.StepLR(\n",
" optimizer=optimizer, step_size=self.lr_decay_steps, gamma=0.5\n",
- " )\n",
- " return {'optimizer': optimizer, 'lr_scheduler': lr_scheduler}\n",
+ " ),\n",
+ " \"frequency\": 1,\n",
+ " \"interval\": \"step\",\n",
+ " }\n",
+ " return {\"optimizer\": optimizer, \"lr_scheduler\": scheduler}\n",
"\n",
" def get_test_size(self):\n",
" return self.test_size\n",
diff --git a/nbs/common.base_multivariate.ipynb b/nbs/common.base_multivariate.ipynb
index 962d0c6d..f642ab7c 100644
--- a/nbs/common.base_multivariate.ipynb
+++ b/nbs/common.base_multivariate.ipynb
@@ -104,25 +104,19 @@
" drop_last_loader=False,\n",
" random_seed=1, \n",
" alias=None,\n",
- " optimizer=None,\n",
- " optimizer_kwargs=None,\n",
- " lr_scheduler=None,\n",
- " lr_scheduler_kwargs=None,\n",
" dataloader_kwargs=None,\n",
+ " config_optimizers=None,\n",
" **trainer_kwargs):\n",
" super().__init__(\n",
" random_seed=random_seed,\n",
" loss=loss,\n",
" valid_loss=valid_loss,\n",
- " optimizer=optimizer,\n",
- " optimizer_kwargs=optimizer_kwargs,\n",
- " lr_scheduler=lr_scheduler,\n",
- " lr_scheduler_kwargs=lr_scheduler_kwargs, \n",
" futr_exog_list=futr_exog_list,\n",
" hist_exog_list=hist_exog_list,\n",
" stat_exog_list=stat_exog_list,\n",
" max_steps=max_steps,\n",
" early_stop_patience_steps=early_stop_patience_steps,\n",
+ " config_optimizers=config_optimizers,\n",
" **trainer_kwargs,\n",
" )\n",
"\n",
diff --git a/nbs/common.base_recurrent.ipynb b/nbs/common.base_recurrent.ipynb
index bf891927..d4d5432b 100644
--- a/nbs/common.base_recurrent.ipynb
+++ b/nbs/common.base_recurrent.ipynb
@@ -110,25 +110,19 @@
" drop_last_loader=False,\n",
" random_seed=1, \n",
" alias=None,\n",
- " optimizer=None,\n",
- " optimizer_kwargs=None,\n",
- " lr_scheduler=None,\n",
- " lr_scheduler_kwargs=None,\n",
" dataloader_kwargs=None,\n",
+ " config_optimizers=None,\n",
" **trainer_kwargs):\n",
" super().__init__(\n",
" random_seed=random_seed,\n",
" loss=loss,\n",
" valid_loss=valid_loss,\n",
- " optimizer=optimizer,\n",
- " optimizer_kwargs=optimizer_kwargs,\n",
- " lr_scheduler=lr_scheduler,\n",
- " lr_scheduler_kwargs=lr_scheduler_kwargs,\n",
" futr_exog_list=futr_exog_list,\n",
" hist_exog_list=hist_exog_list,\n",
" stat_exog_list=stat_exog_list,\n",
" max_steps=max_steps,\n",
- " early_stop_patience_steps=early_stop_patience_steps, \n",
+ " early_stop_patience_steps=early_stop_patience_steps,\n",
+ " config_optimizers=config_optimizers,\n",
" **trainer_kwargs,\n",
" )\n",
"\n",
diff --git a/nbs/common.base_windows.ipynb b/nbs/common.base_windows.ipynb
index 72a9cfeb..01a4e749 100644
--- a/nbs/common.base_windows.ipynb
+++ b/nbs/common.base_windows.ipynb
@@ -114,25 +114,19 @@
" drop_last_loader=False,\n",
" random_seed=1,\n",
" alias=None,\n",
- " optimizer=None,\n",
- " optimizer_kwargs=None,\n",
- " lr_scheduler=None,\n",
- " lr_scheduler_kwargs=None,\n",
" dataloader_kwargs=None,\n",
+ " config_optimizers=None,\n",
" **trainer_kwargs):\n",
" super().__init__(\n",
" random_seed=random_seed,\n",
" loss=loss,\n",
" valid_loss=valid_loss,\n",
- " optimizer=optimizer,\n",
- " optimizer_kwargs=optimizer_kwargs,\n",
- " lr_scheduler=lr_scheduler,\n",
- " lr_scheduler_kwargs=lr_scheduler_kwargs,\n",
" futr_exog_list=futr_exog_list,\n",
" hist_exog_list=hist_exog_list,\n",
" stat_exog_list=stat_exog_list,\n",
" max_steps=max_steps,\n",
- " early_stop_patience_steps=early_stop_patience_steps, \n",
+ " early_stop_patience_steps=early_stop_patience_steps,\n",
+ " config_optimizers=config_optimizers,\n",
" **trainer_kwargs,\n",
" )\n",
"\n",
diff --git a/nbs/core.ipynb b/nbs/core.ipynb
index 5138b5e6..3c7f9535 100644
--- a/nbs/core.ipynb
+++ b/nbs/core.ipynb
@@ -3054,114 +3054,23 @@
"# test customized optimizer behavior such that the user defined optimizer result should differ from default\n",
"# tests consider models implemented using different base classes such as BaseWindows, BaseRecurrent, BaseMultivariate\n",
"\n",
- "for nf_model in [NHITS, RNN, StemGNN]:\n",
- " # default optimizer is based on Adam\n",
- " params = {\"h\": 12, \"input_size\": 24, \"max_steps\": 1}\n",
- " if nf_model.__name__ == \"StemGNN\":\n",
- " params.update({\"n_series\": 2})\n",
- " models = [nf_model(**params)]\n",
- " nf = NeuralForecast(models=models, freq='M')\n",
- " nf.fit(AirPassengersPanel_train)\n",
- " default_optimizer_predict = nf.predict()\n",
- " mean = default_optimizer_predict.loc[:, nf_model.__name__].mean()\n",
- "\n",
- " # using a customized optimizer\n",
- " params.update({\n",
- " \"optimizer\": torch.optim.Adadelta,\n",
- " \"optimizer_kwargs\": {\"rho\": 0.45}, \n",
- " })\n",
- " models2 = [nf_model(**params)]\n",
- " nf2 = NeuralForecast(models=models2, freq='M')\n",
- " nf2.fit(AirPassengersPanel_train)\n",
- " customized_optimizer_predict = nf2.predict()\n",
- " mean2 = customized_optimizer_predict.loc[:, nf_model.__name__].mean()\n",
- " assert mean2 != mean"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "3db3fe1e",
- "metadata": {},
- "outputs": [],
- "source": [
- "#| hide\n",
- "# test that if the user-defined optimizer is not a subclass of torch.optim.optimizer, failed with exception\n",
- "# tests cover different types of base classes such as BaseWindows, BaseRecurrent, BaseMultivariate\n",
- "test_fail(lambda: NHITS(h=12, input_size=24, max_steps=10, optimizer=torch.nn.Module), contains=\"optimizer is not a valid subclass of torch.optim.Optimizer\")\n",
- "test_fail(lambda: RNN(h=12, input_size=24, max_steps=10, optimizer=torch.nn.Module), contains=\"optimizer is not a valid subclass of torch.optim.Optimizer\")\n",
- "test_fail(lambda: StemGNN(h=12, input_size=24, max_steps=10, n_series=2, optimizer=torch.nn.Module), contains=\"optimizer is not a valid subclass of torch.optim.Optimizer\")\n"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "d908240f",
- "metadata": {},
- "outputs": [],
- "source": [
- "#| hide\n",
- "# test that if we pass \"lr\" parameter, we expect warning and it ignores the passed in 'lr' parameter\n",
- "# tests consider models implemented using different base classes such as BaseWindows, BaseRecurrent, BaseMultivariate\n",
- "\n",
- "for nf_model in [NHITS, RNN, StemGNN]:\n",
- " params = {\n",
- " \"h\": 12, \n",
- " \"input_size\": 24, \n",
- " \"max_steps\": 1, \n",
- " \"optimizer\": torch.optim.Adadelta, \n",
- " \"optimizer_kwargs\": {\"lr\": 0.8, \"rho\": 0.45}\n",
- " }\n",
- " if nf_model.__name__ == \"StemGNN\":\n",
- " params.update({\"n_series\": 2})\n",
- " models = [nf_model(**params)]\n",
- " nf = NeuralForecast(models=models, freq='M')\n",
- " with warnings.catch_warnings(record=True) as issued_warnings:\n",
- " warnings.simplefilter('always', UserWarning)\n",
- " nf.fit(AirPassengersPanel_train)\n",
- " assert any(\"ignoring learning rate passed in optimizer_kwargs, using the model's learning rate\" in str(w.message) for w in issued_warnings)"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "c97858b5-e6a0-4353-a48f-5a5460eb2314",
- "metadata": {},
- "outputs": [],
- "source": [
- "#| hide\n",
- "# test that if we pass \"optimizer_kwargs\" but not \"optimizer\", we expect a warning\n",
- "# tests consider models implemented using different base classes such as BaseWindows, BaseRecurrent, BaseMultivariate\n",
- "\n",
- "for nf_model in [NHITS, RNN, StemGNN]:\n",
- " params = {\n",
- " \"h\": 12, \n",
- " \"input_size\": 24, \n",
- " \"max_steps\": 1,\n",
- " \"optimizer_kwargs\": {\"lr\": 0.8, \"rho\": 0.45}\n",
+ "def custom_optimizer(base_model):\n",
+ " optimizer = torch.optim.Adadelta(params=base_model.parameters(), rho=0.75)\n",
+ " scheduler=torch.optim.lr_scheduler.StepLR(\n",
+ " optimizer=optimizer, step_size=10e7, gamma=0.5\n",
+ " )\n",
+ " scheduler_config = {\n",
+ " 'scheduler': scheduler,\n",
+ " 'interval': 'step',\n",
+ " 'frequency': 1,\n",
+ " 'monitor': 'val_loss',\n",
+ " 'strict': True,\n",
+ " 'name': None,\n",
" }\n",
- " if nf_model.__name__ == \"StemGNN\":\n",
- " params.update({\"n_series\": 2})\n",
- " models = [nf_model(**params)]\n",
- " nf = NeuralForecast(models=models, freq='M')\n",
- " with warnings.catch_warnings(record=True) as issued_warnings:\n",
- " warnings.simplefilter('always', UserWarning)\n",
- " nf.fit(AirPassengersPanel_train)\n",
- " assert any(\"ignoring optimizer_kwargs as the optimizer is not specified\" in str(w.message) for w in issued_warnings)"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "24142322",
- "metadata": {},
- "outputs": [],
- "source": [
- "#| hide\n",
- "# test customized lr_scheduler behavior such that the user defined lr_scheduler result should differ from default\n",
- "# tests consider models implemented using different base classes such as BaseWindows, BaseRecurrent, BaseMultivariate\n",
+ " return {'optimizer': optimizer, 'lr_scheduler': scheduler_config}\n",
"\n",
"for nf_model in [NHITS, RNN, StemGNN]:\n",
+ " # default optimizer is based on Adam\n",
" params = {\"h\": 12, \"input_size\": 24, \"max_steps\": 1}\n",
" if nf_model.__name__ == \"StemGNN\":\n",
" params.update({\"n_series\": 2})\n",
@@ -3171,89 +3080,15 @@
" default_optimizer_predict = nf.predict()\n",
" mean = default_optimizer_predict.loc[:, nf_model.__name__].mean()\n",
"\n",
- " # using a customized lr_scheduler, default is StepLR\n",
- " params.update({\n",
- " \"lr_scheduler\": torch.optim.lr_scheduler.ConstantLR,\n",
- " \"lr_scheduler_kwargs\": {\"factor\": 0.78}, \n",
- " })\n",
+ " # employ custom optimizer\n",
+ " params.update({'config_optimizers': custom_optimizer})\n",
" models2 = [nf_model(**params)]\n",
" nf2 = NeuralForecast(models=models2, freq='M')\n",
" nf2.fit(AirPassengersPanel_train)\n",
" customized_optimizer_predict = nf2.predict()\n",
" mean2 = customized_optimizer_predict.loc[:, nf_model.__name__].mean()\n",
- " assert mean2 != mean"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "54c7b5e2",
- "metadata": {},
- "outputs": [],
- "source": [
- "#| hide\n",
- "# test that if the user-defined lr_scheduler is not a subclass of torch.optim.lr_scheduler, failed with exception\n",
- "# tests cover different types of base classes such as BaseWindows, BaseRecurrent, BaseMultivariate\n",
- "test_fail(lambda: NHITS(h=12, input_size=24, max_steps=10, lr_scheduler=torch.nn.Module), contains=\"lr_scheduler is not a valid subclass of torch.optim.lr_scheduler.LRScheduler\")\n",
- "test_fail(lambda: RNN(h=12, input_size=24, max_steps=10, lr_scheduler=torch.nn.Module), contains=\"lr_scheduler is not a valid subclass of torch.optim.lr_scheduler.LRScheduler\")\n",
- "test_fail(lambda: StemGNN(h=12, input_size=24, max_steps=10, n_series=2, lr_scheduler=torch.nn.Module), contains=\"lr_scheduler is not a valid subclass of torch.optim.lr_scheduler.LRScheduler\")\n"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "b1d8bebb",
- "metadata": {},
- "outputs": [],
- "source": [
- "#| hide\n",
- "# test that if we pass in \"optimizer\" parameter, we expect warning and it ignores them\n",
- "# tests consider models implemented using different base classes such as BaseWindows, BaseRecurrent, BaseMultivariate\n",
- "\n",
- "for nf_model in [NHITS, RNN, StemGNN]:\n",
- " params = {\n",
- " \"h\": 12, \n",
- " \"input_size\": 24, \n",
- " \"max_steps\": 1, \n",
- " \"lr_scheduler\": torch.optim.lr_scheduler.ConstantLR, \n",
- " \"lr_scheduler_kwargs\": {\"optimizer\": torch.optim.Adadelta, \"factor\": 0.22}\n",
- " }\n",
- " if nf_model.__name__ == \"StemGNN\":\n",
- " params.update({\"n_series\": 2})\n",
- " models = [nf_model(**params)]\n",
- " nf = NeuralForecast(models=models, freq='M')\n",
- " with warnings.catch_warnings(record=True) as issued_warnings:\n",
- " warnings.simplefilter('always', UserWarning)\n",
- " nf.fit(AirPassengersPanel_train)\n",
- " assert any(\"ignoring optimizer passed in lr_scheduler_kwargs, using the model's optimizer\" in str(w.message) for w in issued_warnings)"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "06febece",
- "metadata": {},
- "outputs": [],
- "source": [
- "#| hide\n",
- "# test that if we pass in \"lr_scheduler_kwargs\" but not \"lr_scheduler\", we expect a warning\n",
- "# tests consider models implemented using different base classes such as BaseWindows, BaseRecurrent, BaseMultivariate\n",
"\n",
- "for nf_model in [NHITS, RNN, StemGNN]:\n",
- " params = {\n",
- " \"h\": 12, \n",
- " \"input_size\": 24, \n",
- " \"max_steps\": 1,\n",
- " \"lr_scheduler_kwargs\": {\"optimizer\": torch.optim.Adadelta, \"factor\": 0.22}\n",
- " }\n",
- " if nf_model.__name__ == \"StemGNN\":\n",
- " params.update({\"n_series\": 2})\n",
- " models = [nf_model(**params)]\n",
- " nf = NeuralForecast(models=models, freq='M')\n",
- " with warnings.catch_warnings(record=True) as issued_warnings:\n",
- " warnings.simplefilter('always', UserWarning)\n",
- " nf.fit(AirPassengersPanel_train)\n",
- " assert any(\"ignoring lr_scheduler_kwargs as the lr_scheduler is not specified\" in str(w.message) for w in issued_warnings)\n"
+ " assert mean2 != mean"
]
},
{
@@ -3359,6 +3194,56 @@
")\n",
"assert all([col in cv2.columns for col in ['NHITS-lo-30', 'NHITS-hi-30']])"
]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "1f0ca124",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "#| hide\n",
+ "# test customized lr_scheduler behavior such that the user defined lr_scheduler result should differ from default\n",
+ "# we have full control on the optimization behavior such that ReduceLROnPlateau can be supported\n",
+ "# by passing the monitor parameter as well\n",
+ "# tests consider models implemented using different base classes such as BaseWindows, BaseRecurrent, BaseMultivariate\n",
+ "\n",
+ "def custom_optimizer(base_model):\n",
+ " optimizer = torch.optim.Adadelta(params=base_model.parameters(), rho=0.75)\n",
+ "\n",
+ " # test ReduceLROnPlateau\n",
+ " scheduler = torch.optim.lr_scheduler.ReduceLROnPlateau(\n",
+ " optimizer, mode='min', factor=0.5, patience=2,\n",
+ " ) \n",
+ " scheduler_config = {\n",
+ " 'scheduler': scheduler,\n",
+ " 'interval': 'step',\n",
+ " 'frequency': 1,\n",
+ " 'monitor': 'train_loss', # note that train_loss is used instead val_loss\n",
+ " 'strict': True,\n",
+ " 'name': None,\n",
+ " }\n",
+ " return {'optimizer': optimizer, 'lr_scheduler': scheduler_config}\n",
+ "\n",
+ "for nf_model in [NHITS, RNN, StemGNN]:\n",
+ " params = {\"h\": 12, \"input_size\": 24, \"max_steps\": 2}\n",
+ " if nf_model.__name__ == \"StemGNN\":\n",
+ " params.update({\"n_series\": 2})\n",
+ " models = [nf_model(**params)]\n",
+ " nf = NeuralForecast(models=models, freq='M')\n",
+ " nf.fit(AirPassengersPanel_train)\n",
+ " default_predict = nf.predict()\n",
+ " mean = default_predict.loc[:, nf_model.__name__].mean()\n",
+ "\n",
+ " # employ custom optimizer\n",
+ " params.update({'config_optimizers': custom_optimizer})\n",
+ " models2 = [nf_model(**params)]\n",
+ " nf2 = NeuralForecast(models=models2, freq='M')\n",
+ " nf2.fit(AirPassengersPanel_train)\n",
+ " customized_predict = nf2.predict()\n",
+ " mean2 = customized_predict.loc[:, nf_model.__name__].mean()\n",
+ " assert mean2 != mean\n"
+ ]
}
],
"metadata": {
diff --git a/nbs/models.autoformer.ipynb b/nbs/models.autoformer.ipynb
index 0badf850..872a7b28 100644
--- a/nbs/models.autoformer.ipynb
+++ b/nbs/models.autoformer.ipynb
@@ -457,12 +457,11 @@
" `random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
\n",
" `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n",
" `alias`: str, optional, Custom name of the model.
\n",
- " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n",
- " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n",
- " `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
\n",
- " `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
\n",
" `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
\n",
- " `**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
\n",
+ " `config_optimizers`: , optional, A callable function that implements the optimization behavior as detailed in
\n",
+ " https://lightning.ai/docs/pytorch/stable/api/lightning.pytorch.core.LightningModule.html#lightning.pytorch.core.LightningModule.configure_optimizers
\n",
+ " Note that the function must accept an argument which is the subclass of Neuralforecast's `BaseModel` to speficy the model's parameters() for the optimizer.
\n",
+ " `**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
\n",
"\n",
"\t*References*
\n",
"\t- [Wu, Haixu, Jiehui Xu, Jianmin Wang, and Mingsheng Long. \"Autoformer: Decomposition transformers with auto-correlation for long-term series forecasting\"](https://proceedings.neurips.cc/paper/2021/hash/bcc0d400288793e8bdcd7c19a8ac0c2b-Abstract.html)
\n",
@@ -506,11 +505,8 @@
" scaler_type: str = 'identity',\n",
" random_seed: int = 1,\n",
" drop_last_loader: bool = False,\n",
- " optimizer = None,\n",
- " optimizer_kwargs = None,\n",
- " lr_scheduler = None,\n",
- " lr_scheduler_kwargs = None,\n",
" dataloader_kwargs=None,\n",
+ " config_optimizers=None,\n",
" **trainer_kwargs):\n",
" super(Autoformer, self).__init__(h=h,\n",
" input_size=input_size,\n",
@@ -534,11 +530,8 @@
" scaler_type=scaler_type,\n",
" drop_last_loader=drop_last_loader,\n",
" random_seed=random_seed,\n",
- " optimizer=optimizer,\n",
- " optimizer_kwargs=optimizer_kwargs,\n",
- " lr_scheduler=lr_scheduler,\n",
- " lr_scheduler_kwargs=lr_scheduler_kwargs,\n",
" dataloader_kwargs=dataloader_kwargs,\n",
+ " config_optimizers=config_optimizers,\n",
" **trainer_kwargs)\n",
"\n",
" # Architecture\n",
diff --git a/nbs/models.bitcn.ipynb b/nbs/models.bitcn.ipynb
index 14f8ee60..5723648a 100644
--- a/nbs/models.bitcn.ipynb
+++ b/nbs/models.bitcn.ipynb
@@ -177,11 +177,10 @@
" `random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
\n",
" `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n",
" `alias`: str, optional, Custom name of the model.
\n",
- " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n",
- " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n",
- " `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
\n",
- " `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
\n",
" `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
\n",
+ " `config_optimizers`: , optional, A callable function that implements the optimization behavior as detailed in
\n",
+ " https://lightning.ai/docs/pytorch/stable/api/lightning.pytorch.core.LightningModule.html#lightning.pytorch.core.LightningModule.configure_optimizers
\n",
+ " Note that the function must accept an argument which is the subclass of Neuralforecast's `BaseModel` to speficy the model's parameters() for the optimizer.
\n",
" `**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
\n",
"\n",
" **References**
\n",
@@ -219,11 +218,8 @@
" scaler_type: str = 'identity',\n",
" random_seed: int = 1,\n",
" drop_last_loader: bool = False,\n",
- " optimizer = None,\n",
- " optimizer_kwargs = None,\n",
- " lr_scheduler = None,\n",
- " lr_scheduler_kwargs = None,\n",
" dataloader_kwargs=None,\n",
+ " config_optimizers=None,\n",
" **trainer_kwargs):\n",
" super(BiTCN, self).__init__(\n",
" h=h,\n",
@@ -248,11 +244,8 @@
" scaler_type=scaler_type,\n",
" random_seed=random_seed,\n",
" drop_last_loader=drop_last_loader,\n",
- " optimizer=optimizer,\n",
- " optimizer_kwargs=optimizer_kwargs,\n",
- " lr_scheduler=lr_scheduler,\n",
- " lr_scheduler_kwargs=lr_scheduler_kwargs,\n",
" dataloader_kwargs=dataloader_kwargs,\n",
+ " config_optimizers=config_optimizers,\n",
" **trainer_kwargs\n",
" )\n",
"\n",
diff --git a/nbs/models.deepar.ipynb b/nbs/models.deepar.ipynb
index 29fcb12b..350a21ca 100644
--- a/nbs/models.deepar.ipynb
+++ b/nbs/models.deepar.ipynb
@@ -182,11 +182,10 @@
" `random_seed`: int, random_seed for pytorch initializer and numpy generators.
\n",
" `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n",
" `alias`: str, optional, Custom name of the model.
\n",
- " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n",
- " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n",
- " `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
\n",
- " `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
\n",
" `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
\n",
+ " `config_optimizers`: , optional, A callable function that implements the optimization behavior as detailed in
\n",
+ " https://lightning.ai/docs/pytorch/stable/api/lightning.pytorch.core.LightningModule.html#lightning.pytorch.core.LightningModule.configure_optimizers
\n",
+ " Note that the function must accept an argument which is the subclass of Neuralforecast's `BaseModel` to speficy the model's parameters() for the optimizer.
\n",
" `**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
\n",
"\n",
" **References**
\n",
@@ -229,11 +228,8 @@
" scaler_type: str = 'identity',\n",
" random_seed: int = 1,\n",
" drop_last_loader = False,\n",
- " optimizer = None,\n",
- " optimizer_kwargs = None,\n",
- " lr_scheduler = None,\n",
- " lr_scheduler_kwargs = None,\n",
" dataloader_kwargs = None,\n",
+ " config_optimizers=None,\n",
" **trainer_kwargs):\n",
"\n",
" if exclude_insample_y:\n",
@@ -271,11 +267,8 @@
" scaler_type=scaler_type,\n",
" drop_last_loader=drop_last_loader,\n",
" random_seed=random_seed,\n",
- " optimizer=optimizer,\n",
- " optimizer_kwargs=optimizer_kwargs,\n",
- " lr_scheduler=lr_scheduler,\n",
- " lr_scheduler_kwargs=lr_scheduler_kwargs,\n",
" dataloader_kwargs=dataloader_kwargs,\n",
+ " config_optimizers=config_optimizers,\n",
" **trainer_kwargs)\n",
"\n",
" self.horizon_backup = self.h # Used because h=0 during training\n",
diff --git a/nbs/models.deepnpts.ipynb b/nbs/models.deepnpts.ipynb
index 8f63edbb..1a0704c0 100644
--- a/nbs/models.deepnpts.ipynb
+++ b/nbs/models.deepnpts.ipynb
@@ -120,11 +120,10 @@
" `random_seed`: int, random_seed for pytorch initializer and numpy generators.
\n",
" `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n",
" `alias`: str, optional, Custom name of the model.
\n",
- " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n",
- " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n",
- " `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
\n",
- " `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
\n",
" `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
\n",
+ " `config_optimizers`: , optional, A callable function that implements the optimization behavior as detailed in
\n",
+ " https://lightning.ai/docs/pytorch/stable/api/lightning.pytorch.core.LightningModule.html#lightning.pytorch.core.LightningModule.configure_optimizers
\n",
+ " Note that the function must accept an argument which is the subclass of Neuralforecast's `BaseModel` to speficy the model's parameters() for the optimizer.
\n",
" `**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
\n",
"\n",
" **References**
\n",
@@ -164,11 +163,8 @@
" scaler_type: str = 'standard',\n",
" random_seed: int = 1,\n",
" drop_last_loader = False,\n",
- " optimizer = None,\n",
- " optimizer_kwargs = None,\n",
- " lr_scheduler = None,\n",
- " lr_scheduler_kwargs = None,\n",
" dataloader_kwargs = None,\n",
+ " config_optimizers = None,\n",
" **trainer_kwargs):\n",
"\n",
" if exclude_insample_y:\n",
@@ -203,11 +199,8 @@
" scaler_type=scaler_type,\n",
" drop_last_loader=drop_last_loader,\n",
" random_seed=random_seed,\n",
- " optimizer=optimizer,\n",
- " optimizer_kwargs=optimizer_kwargs,\n",
- " lr_scheduler=lr_scheduler,\n",
- " lr_scheduler_kwargs=lr_scheduler_kwargs,\n",
" dataloader_kwargs=dataloader_kwargs,\n",
+ " config_optimizers=config_optimizers,\n",
" **trainer_kwargs)\n",
"\n",
" self.h = h\n",
diff --git a/nbs/models.dilated_rnn.ipynb b/nbs/models.dilated_rnn.ipynb
index 30629252..b8f5ea08 100644
--- a/nbs/models.dilated_rnn.ipynb
+++ b/nbs/models.dilated_rnn.ipynb
@@ -389,11 +389,10 @@
" `random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
\n",
" `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n",
" `alias`: str, optional, Custom name of the model.
\n",
- " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n",
- " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n",
- " `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
\n",
- " `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
\n",
" `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
\n",
+ " `config_optimizers`: , optional, A callable function that implements the optimization behavior as detailed in
\n",
+ " https://lightning.ai/docs/pytorch/stable/api/lightning.pytorch.core.LightningModule.html#lightning.pytorch.core.LightningModule.configure_optimizers
\n",
+ " Note that the function must accept an argument which is the subclass of Neuralforecast's `BaseModel` to speficy the model's parameters() for the optimizer.
\n",
" `**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
\n",
" \"\"\"\n",
" # Class attributes\n",
@@ -428,11 +427,8 @@
" scaler_type: str = 'robust',\n",
" random_seed: int = 1,\n",
" drop_last_loader: bool = False,\n",
- " optimizer = None,\n",
- " optimizer_kwargs = None,\n",
- " lr_scheduler = None,\n",
- " lr_scheduler_kwargs = None,\n",
" dataloader_kwargs = None,\n",
+ " config_optimizers=None,\n",
" **trainer_kwargs):\n",
" super(DilatedRNN, self).__init__(\n",
" h=h,\n",
@@ -453,11 +449,8 @@
" stat_exog_list=stat_exog_list,\n",
" drop_last_loader=drop_last_loader,\n",
" random_seed=random_seed,\n",
- " optimizer=optimizer,\n",
- " optimizer_kwargs=optimizer_kwargs,\n",
- " lr_scheduler=lr_scheduler,\n",
- " lr_scheduler_kwargs=lr_scheduler_kwargs,\n",
" dataloader_kwargs=dataloader_kwargs,\n",
+ " config_optimizers=config_optimizers,\n",
" **trainer_kwargs\n",
" )\n",
"\n",
diff --git a/nbs/models.dlinear.ipynb b/nbs/models.dlinear.ipynb
index 994ea0e1..efe0878e 100644
--- a/nbs/models.dlinear.ipynb
+++ b/nbs/models.dlinear.ipynb
@@ -161,11 +161,10 @@
" `random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
\n",
" `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n",
" `alias`: str, optional, Custom name of the model.
\n",
- " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n",
- " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n",
- " `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
\n",
- " `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
\n",
" `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
\n",
+ " `config_optimizers`: , optional, A callable function that implements the optimization behavior as detailed in
\n",
+ " https://lightning.ai/docs/pytorch/stable/api/lightning.pytorch.core.LightningModule.html#lightning.pytorch.core.LightningModule.configure_optimizers
\n",
+ " Note that the function must accept an argument which is the subclass of Neuralforecast's `BaseModel` to speficy the model's parameters() for the optimizer.
\n",
" `**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
\n",
"\n",
"\t*References*
\n",
@@ -201,11 +200,8 @@
" scaler_type: str = 'identity',\n",
" random_seed: int = 1,\n",
" drop_last_loader: bool = False,\n",
- " optimizer = None,\n",
- " optimizer_kwargs = None,\n",
- " lr_scheduler = None,\n",
- " lr_scheduler_kwargs = None,\n",
" dataloader_kwargs=None,\n",
+ " config_optimizers=None,\n",
" **trainer_kwargs):\n",
" super(DLinear, self).__init__(h=h,\n",
" input_size=input_size,\n",
@@ -229,11 +225,8 @@
" scaler_type=scaler_type,\n",
" drop_last_loader=drop_last_loader,\n",
" random_seed=random_seed,\n",
- " optimizer=optimizer,\n",
- " optimizer_kwargs=optimizer_kwargs,\n",
- " lr_scheduler=lr_scheduler,\n",
- " lr_scheduler_kwargs=lr_scheduler_kwargs,\n",
" dataloader_kwargs=dataloader_kwargs,\n",
+ " config_optimizers=config_optimizers,\n",
" **trainer_kwargs)\n",
" \n",
" # Architecture\n",
diff --git a/nbs/models.fedformer.ipynb b/nbs/models.fedformer.ipynb
index bd423274..7558530e 100644
--- a/nbs/models.fedformer.ipynb
+++ b/nbs/models.fedformer.ipynb
@@ -450,11 +450,10 @@
" `random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
\n",
" `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n",
" `alias`: str, optional, Custom name of the model.
\n",
- " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n",
- " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n",
- " `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
\n",
- " `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
\n",
" `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
\n",
+ " `config_optimizers`: , optional, A callable function that implements the optimization behavior as detailed in
\n",
+ " https://lightning.ai/docs/pytorch/stable/api/lightning.pytorch.core.LightningModule.html#lightning.pytorch.core.LightningModule.configure_optimizers
\n",
+ " Note that the function must accept an argument which is the subclass of Neuralforecast's `BaseModel` to speficy the model's parameters() for the optimizer.
\n",
" `**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
\n",
"\n",
" \"\"\"\n",
@@ -498,11 +497,8 @@
" scaler_type: str = 'identity',\n",
" random_seed: int = 1,\n",
" drop_last_loader: bool = False,\n",
- " optimizer=None,\n",
- " optimizer_kwargs=None,\n",
- " lr_scheduler = None,\n",
- " lr_scheduler_kwargs = None,\n",
" dataloader_kwargs = None,\n",
+ " config_optimizers=None,\n",
" **trainer_kwargs):\n",
" super(FEDformer, self).__init__(h=h,\n",
" input_size=input_size,\n",
@@ -525,11 +521,8 @@
" scaler_type=scaler_type,\n",
" drop_last_loader=drop_last_loader,\n",
" random_seed=random_seed,\n",
- " optimizer=optimizer,\n",
- " optimizer_kwargs=optimizer_kwargs,\n",
- " lr_scheduler=lr_scheduler,\n",
- " lr_scheduler_kwargs=lr_scheduler_kwargs,\n",
- " dataloader_kwargs=dataloader_kwargs, \n",
+ " dataloader_kwargs=dataloader_kwargs,\n",
+ " config_optimizers=config_optimizers,\n",
" **trainer_kwargs)\n",
" # Architecture\n",
" self.label_len = int(np.ceil(input_size * decoder_input_size_multiplier))\n",
diff --git a/nbs/models.gru.ipynb b/nbs/models.gru.ipynb
index eb2f91ad..eebe2362 100644
--- a/nbs/models.gru.ipynb
+++ b/nbs/models.gru.ipynb
@@ -133,11 +133,10 @@
" `random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
\n",
" `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n",
" `alias`: str, optional, Custom name of the model.
\n",
- " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n",
- " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n",
- " `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
\n",
- " `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
\n",
" `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
\n",
+ " `config_optimizers`: , optional, A callable function that implements the optimization behavior as detailed in
\n",
+ " https://lightning.ai/docs/pytorch/stable/api/lightning.pytorch.core.LightningModule.html#lightning.pytorch.core.LightningModule.configure_optimizers
\n",
+ " Note that the function must accept an argument which is the subclass of Neuralforecast's `BaseModel` to speficy the model's parameters() for the optimizer.
\n",
" `**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
\n",
" \"\"\"\n",
" # Class attributes\n",
@@ -173,11 +172,8 @@
" scaler_type: str='robust',\n",
" random_seed=1,\n",
" drop_last_loader = False,\n",
- " optimizer = None,\n",
- " optimizer_kwargs = None,\n",
- " lr_scheduler = None,\n",
- " lr_scheduler_kwargs = None,\n",
" dataloader_kwargs = None,\n",
+ " config_optimizers=None,\n",
" **trainer_kwargs):\n",
" super(GRU, self).__init__(\n",
" h=h,\n",
@@ -198,11 +194,8 @@
" stat_exog_list=stat_exog_list,\n",
" drop_last_loader=drop_last_loader,\n",
" random_seed=random_seed,\n",
- " optimizer=optimizer,\n",
- " optimizer_kwargs=optimizer_kwargs,\n",
- " lr_scheduler=lr_scheduler,\n",
- " lr_scheduler_kwargs=lr_scheduler_kwargs,\n",
" dataloader_kwargs=dataloader_kwargs,\n",
+ " config_optimizers=config_optimizers,\n",
" **trainer_kwargs\n",
" )\n",
"\n",
diff --git a/nbs/models.informer.ipynb b/nbs/models.informer.ipynb
index dffc74e8..bde9492f 100644
--- a/nbs/models.informer.ipynb
+++ b/nbs/models.informer.ipynb
@@ -305,11 +305,10 @@
" `random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
\n",
" `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n",
" `alias`: str, optional, Custom name of the model.
\n",
- " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n",
- " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n",
- " `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
\n",
- " `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
\n",
" `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
\n",
+ " `config_optimizers`: , optional, A callable function that implements the optimization behavior as detailed in
\n",
+ " https://lightning.ai/docs/pytorch/stable/api/lightning.pytorch.core.LightningModule.html#lightning.pytorch.core.LightningModule.configure_optimizers
\n",
+ " Note that the function must accept an argument which is the subclass of Neuralforecast's `BaseModel` to speficy the model's parameters() for the optimizer.
\n",
" `**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
\n",
"\n",
"\t*References*
\n",
@@ -354,11 +353,8 @@
" scaler_type: str = 'identity',\n",
" random_seed: int = 1,\n",
" drop_last_loader: bool = False,\n",
- " optimizer = None,\n",
- " optimizer_kwargs = None,\n",
- " lr_scheduler = None,\n",
- " lr_scheduler_kwargs = None,\n",
" dataloader_kwargs = None,\n",
+ " config_optimizers=None,\n",
" **trainer_kwargs):\n",
" super(Informer, self).__init__(h=h,\n",
" input_size=input_size,\n",
@@ -382,11 +378,8 @@
" scaler_type=scaler_type,\n",
" drop_last_loader=drop_last_loader,\n",
" random_seed=random_seed,\n",
- " optimizer=optimizer,\n",
- " optimizer_kwargs=optimizer_kwargs,\n",
- " lr_scheduler=lr_scheduler,\n",
- " lr_scheduler_kwargs=lr_scheduler_kwargs,\n",
" dataloader_kwargs=dataloader_kwargs,\n",
+ " config_optimizers=config_optimizers,\n",
" **trainer_kwargs)\n",
"\n",
" # Architecture\n",
diff --git a/nbs/models.itransformer.ipynb b/nbs/models.itransformer.ipynb
index e8b6f15d..5ea9736d 100644
--- a/nbs/models.itransformer.ipynb
+++ b/nbs/models.itransformer.ipynb
@@ -227,11 +227,10 @@
" `random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
\n",
" `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n",
" `alias`: str, optional, Custom name of the model.
\n",
- " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n",
- " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n",
- " `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
\n",
- " `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
\n",
" `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
\n",
+ " `config_optimizers`: , optional, A callable function that implements the optimization behavior as detailed in
\n",
+ " https://lightning.ai/docs/pytorch/stable/api/lightning.pytorch.core.LightningModule.html#lightning.pytorch.core.LightningModule.configure_optimizers
\n",
+ " Note that the function must accept an argument which is the subclass of Neuralforecast's `BaseModel` to speficy the model's parameters() for the optimizer.
\n",
" `**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
\n",
" \n",
" **References**
\n",
@@ -271,11 +270,8 @@
" scaler_type: str = 'identity',\n",
" random_seed: int = 1,\n",
" drop_last_loader: bool = False,\n",
- " optimizer = None,\n",
- " optimizer_kwargs = None,\n",
- " lr_scheduler = None,\n",
- " lr_scheduler_kwargs = None, \n",
- " dataloader_kwargs = None, \n",
+ " dataloader_kwargs = None,\n",
+ " config_optimizers=None, \n",
" **trainer_kwargs):\n",
" \n",
" super(iTransformer, self).__init__(h=h,\n",
@@ -296,11 +292,8 @@
" scaler_type=scaler_type,\n",
" random_seed=random_seed,\n",
" drop_last_loader=drop_last_loader,\n",
- " optimizer=optimizer,\n",
- " optimizer_kwargs=optimizer_kwargs,\n",
- " lr_scheduler=lr_scheduler,\n",
- " lr_scheduler_kwargs=lr_scheduler_kwargs,\n",
" dataloader_kwargs=dataloader_kwargs,\n",
+ " config_optimizers=config_optimizers,\n",
" **trainer_kwargs)\n",
" \n",
" self.enc_in = n_series\n",
diff --git a/nbs/models.kan.ipynb b/nbs/models.kan.ipynb
index e825de60..cdf08b7a 100644
--- a/nbs/models.kan.ipynb
+++ b/nbs/models.kan.ipynb
@@ -361,9 +361,10 @@
" `random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
\n",
" `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n",
" `alias`: str, optional, Custom name of the model.
\n",
- " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n",
- " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n",
" `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
\n",
+ " `config_optimizers`: , optional, A callable function that implements the optimization behavior as detailed in
\n",
+ " https://lightning.ai/docs/pytorch/stable/api/lightning.pytorch.core.LightningModule.html#lightning.pytorch.core.LightningModule.configure_optimizers
\n",
+ " Note that the function must accept an argument which is the subclass of Neuralforecast's `BaseModel` to speficy the model's parameters() for the optimizer.
\n",
" `**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
\n",
"\n",
" **References**
\n",
@@ -409,9 +410,8 @@
" scaler_type: str = 'identity',\n",
" random_seed: int = 1,\n",
" drop_last_loader: bool = False,\n",
- " optimizer = None,\n",
- " optimizer_kwargs = None,\n",
" dataloader_kwargs = None,\n",
+ " config_optimizers = None,\n",
" **trainer_kwargs):\n",
" \n",
" # Inherit BaseWindows class\n",
@@ -437,9 +437,8 @@
" scaler_type=scaler_type,\n",
" drop_last_loader=drop_last_loader,\n",
" random_seed=random_seed,\n",
- " optimizer=optimizer,\n",
- " optimizer_kwargs=optimizer_kwargs,\n",
" dataloader_kwargs = dataloader_kwargs,\n",
+ " config_optimizers=config_optimizers,\n",
" **trainer_kwargs)\n",
" \n",
" # Architecture\n",
diff --git a/nbs/models.lstm.ipynb b/nbs/models.lstm.ipynb
index 389bc6ac..af9f77f9 100644
--- a/nbs/models.lstm.ipynb
+++ b/nbs/models.lstm.ipynb
@@ -120,11 +120,10 @@
" `random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
\n",
" `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n",
" `alias`: str, optional, Custom name of the model.
\n",
- " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n",
- " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n",
- " `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
\n",
- " `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
\n",
" `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
\n",
+ " `config_optimizers`: , optional, A callable function that implements the optimization behavior as detailed in
\n",
+ " https://lightning.ai/docs/pytorch/stable/api/lightning.pytorch.core.LightningModule.html#lightning.pytorch.core.LightningModule.configure_optimizers
\n",
+ " Note that the function must accept an argument which is the subclass of Neuralforecast's `BaseModel` to speficy the model's parameters() for the optimizer.
\n",
" `**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
\n",
" \"\"\"\n",
" # Class attributes\n",
@@ -159,11 +158,8 @@
" scaler_type: str = 'robust',\n",
" random_seed = 1,\n",
" drop_last_loader = False,\n",
- " optimizer = None,\n",
- " optimizer_kwargs = None,\n",
- " lr_scheduler = None,\n",
- " lr_scheduler_kwargs = None,\n",
" dataloader_kwargs = None,\n",
+ " config_optimizers = None,\n",
" **trainer_kwargs):\n",
" super(LSTM, self).__init__(\n",
" h=h,\n",
@@ -184,11 +180,8 @@
" stat_exog_list=stat_exog_list,\n",
" drop_last_loader=drop_last_loader,\n",
" random_seed=random_seed,\n",
- " optimizer=optimizer,\n",
- " optimizer_kwargs=optimizer_kwargs,\n",
- " lr_scheduler=lr_scheduler,\n",
- " lr_scheduler_kwargs=lr_scheduler_kwargs,\n",
" dataloader_kwargs=dataloader_kwargs,\n",
+ " config_optimizers=config_optimizers,\n",
" **trainer_kwargs\n",
" )\n",
"\n",
diff --git a/nbs/models.mlp.ipynb b/nbs/models.mlp.ipynb
index b644cf95..ae24586c 100644
--- a/nbs/models.mlp.ipynb
+++ b/nbs/models.mlp.ipynb
@@ -113,11 +113,10 @@
" `random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
\n",
" `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n",
" `alias`: str, optional, Custom name of the model.
\n",
- " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n",
- " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n",
- " `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
\n",
- " `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
\n",
" `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
\n",
+ " `config_optimizers`: , optional, A callable function that implements the optimization behavior as detailed in
\n",
+ " https://lightning.ai/docs/pytorch/stable/api/lightning.pytorch.core.LightningModule.html#lightning.pytorch.core.LightningModule.configure_optimizers
\n",
+ " Note that the function must accept an argument which is the subclass of Neuralforecast's `BaseModel` to speficy the model's parameters() for the optimizer.
\n",
" `**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
\n",
" \"\"\"\n",
" # Class attributes\n",
@@ -151,11 +150,8 @@
" scaler_type: str = 'identity',\n",
" random_seed: int = 1,\n",
" drop_last_loader: bool = False,\n",
- " optimizer = None,\n",
- " optimizer_kwargs = None,\n",
- " lr_scheduler = None,\n",
- " lr_scheduler_kwargs = None,\n",
" dataloader_kwargs = None,\n",
+ " config_optimizers = None,\n",
" **trainer_kwargs):\n",
"\n",
" # Inherit BaseWindows class\n",
@@ -181,11 +177,8 @@
" scaler_type=scaler_type,\n",
" drop_last_loader=drop_last_loader,\n",
" random_seed=random_seed,\n",
- " optimizer=optimizer,\n",
- " optimizer_kwargs=optimizer_kwargs,\n",
- " lr_scheduler=lr_scheduler,\n",
- " lr_scheduler_kwargs=lr_scheduler_kwargs,\n",
" dataloader_kwargs=dataloader_kwargs,\n",
+ " config_optimizers=config_optimizers,\n",
" **trainer_kwargs)\n",
"\n",
" # Architecture\n",
diff --git a/nbs/models.mlpmultivariate.ipynb b/nbs/models.mlpmultivariate.ipynb
index dfcab59d..ed2cfefd 100644
--- a/nbs/models.mlpmultivariate.ipynb
+++ b/nbs/models.mlpmultivariate.ipynb
@@ -107,11 +107,10 @@
" `random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
\n",
" `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n",
" `alias`: str, optional, Custom name of the model.
\n",
- " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n",
- " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n",
- " `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
\n",
- " `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
\n",
" `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
\n",
+ " `config_optimizers`: , optional, A callable function that implements the optimization behavior as detailed in
\n",
+ " https://lightning.ai/docs/pytorch/stable/api/lightning.pytorch.core.LightningModule.html#lightning.pytorch.core.LightningModule.configure_optimizers
\n",
+ " Note that the function must accept an argument which is the subclass of Neuralforecast's `BaseModel` to speficy the model's parameters() for the optimizer.
\n",
" `**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
\n",
" \"\"\"\n",
" # Class attributes\n",
@@ -141,11 +140,8 @@
" scaler_type: str = 'identity',\n",
" random_seed: int = 1,\n",
" drop_last_loader: bool = False,\n",
- " optimizer = None,\n",
- " optimizer_kwargs = None,\n",
- " lr_scheduler = None,\n",
- " lr_scheduler_kwargs = None,\n",
" dataloader_kwargs = None,\n",
+ " config_optimizers = None,\n",
" **trainer_kwargs):\n",
"\n",
" # Inherit BaseMultivariate class\n",
@@ -167,11 +163,8 @@
" scaler_type=scaler_type,\n",
" drop_last_loader=drop_last_loader,\n",
" random_seed=random_seed,\n",
- " optimizer=optimizer,\n",
- " optimizer_kwargs=optimizer_kwargs,\n",
- " lr_scheduler=lr_scheduler,\n",
- " lr_scheduler_kwargs=lr_scheduler_kwargs,\n",
" dataloader_kwargs=dataloader_kwargs,\n",
+ " config_optimizers=config_optimizers,\n",
" **trainer_kwargs)\n",
"\n",
" # Architecture\n",
diff --git a/nbs/models.nbeats.ipynb b/nbs/models.nbeats.ipynb
index a855c4f4..090df8fb 100644
--- a/nbs/models.nbeats.ipynb
+++ b/nbs/models.nbeats.ipynb
@@ -269,11 +269,10 @@
" `random_seed`: int, random_seed for pytorch initializer and numpy generators.
\n",
" `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n",
" `alias`: str, optional, Custom name of the model.
\n",
- " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n",
- " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n",
- " `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
\n",
- " `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
\n",
" `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
\n",
+ " `config_optimizers`: , optional, A callable function that implements the optimization behavior as detailed in
\n",
+ " https://lightning.ai/docs/pytorch/stable/api/lightning.pytorch.core.LightningModule.html#lightning.pytorch.core.LightningModule.configure_optimizers
\n",
+ " Note that the function must accept an argument which is the subclass of Neuralforecast's `BaseModel` to speficy the model's parameters() for the optimizer.
\n",
" `**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
\n",
"\n",
" **References:**
\n",
@@ -313,11 +312,8 @@
" scaler_type: str ='identity',\n",
" random_seed: int = 1,\n",
" drop_last_loader: bool = False,\n",
- " optimizer = None,\n",
- " optimizer_kwargs = None,\n",
- " lr_scheduler = None,\n",
- " lr_scheduler_kwargs = None,\n",
" dataloader_kwargs = None,\n",
+ " config_optimizers = None,\n",
" **trainer_kwargs):\n",
" \n",
" # Protect horizon collapsed seasonality and trend NBEATSx-i basis\n",
@@ -345,11 +341,8 @@
" scaler_type=scaler_type,\n",
" drop_last_loader=drop_last_loader,\n",
" random_seed=random_seed,\n",
- " optimizer=optimizer,\n",
- " optimizer_kwargs=optimizer_kwargs,\n",
- " lr_scheduler=lr_scheduler,\n",
- " lr_scheduler_kwargs=lr_scheduler_kwargs,\n",
" dataloader_kwargs=dataloader_kwargs,\n",
+ " config_optimizers=config_optimizers,\n",
" **trainer_kwargs)\n",
"\n",
" # Architecture\n",
diff --git a/nbs/models.nbeatsx.ipynb b/nbs/models.nbeatsx.ipynb
index 04f56ea5..121fb3d7 100644
--- a/nbs/models.nbeatsx.ipynb
+++ b/nbs/models.nbeatsx.ipynb
@@ -413,11 +413,10 @@
" `random_seed`: int, random seed initialization for replicability.
\n",
" `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n",
" `alias`: str, optional, Custom name of the model.
\n",
- " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n",
- " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n",
- " `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
\n",
- " `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
\n",
" `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
\n",
+ " `config_optimizers`: , optional, A callable function that implements the optimization behavior as detailed in
\n",
+ " https://lightning.ai/docs/pytorch/stable/api/lightning.pytorch.core.LightningModule.html#lightning.pytorch.core.LightningModule.configure_optimizers
\n",
+ " Note that the function must accept an argument which is the subclass of Neuralforecast's `BaseModel` to speficy the model's parameters() for the optimizer.
\n",
" `**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
\n",
"\n",
" **References:**
\n",
@@ -463,11 +462,8 @@
" scaler_type: str = \"identity\",\n",
" random_seed: int = 1,\n",
" drop_last_loader: bool = False,\n",
- " optimizer = None,\n",
- " optimizer_kwargs = None,\n",
- " lr_scheduler = None,\n",
- " lr_scheduler_kwargs = None,\n",
" dataloader_kwargs = None,\n",
+ " config_optimizers = None,\n",
" **trainer_kwargs,\n",
" ):\n",
" # Protect horizon collapsed seasonality and trend NBEATSx-i basis\n",
@@ -499,11 +495,8 @@
" scaler_type=scaler_type,\n",
" drop_last_loader=drop_last_loader,\n",
" random_seed=random_seed,\n",
- " optimizer=optimizer,\n",
- " optimizer_kwargs=optimizer_kwargs,\n",
- " lr_scheduler=lr_scheduler,\n",
- " lr_scheduler_kwargs=lr_scheduler_kwargs,\n",
" dataloader_kwargs=dataloader_kwargs,\n",
+ " config_optimizers=config_optimizers,\n",
" **trainer_kwargs)\n",
"\n",
" # Architecture\n",
diff --git a/nbs/models.nhits.ipynb b/nbs/models.nhits.ipynb
index 43351c6a..d9a6fadf 100644
--- a/nbs/models.nhits.ipynb
+++ b/nbs/models.nhits.ipynb
@@ -302,11 +302,10 @@
" `random_seed`: int, random_seed for pytorch initializer and numpy generators.
\n",
" `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n",
" `alias`: str, optional, Custom name of the model.
\n",
- " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n",
- " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n",
- " `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
\n",
- " `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
\n",
" `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
\n",
+ " `config_optimizers`: , optional, A callable function that implements the optimization behavior as detailed in
\n",
+ " https://lightning.ai/docs/pytorch/stable/api/lightning.pytorch.core.LightningModule.html#lightning.pytorch.core.LightningModule.configure_optimizers
\n",
+ " Note that the function must accept an argument which is the subclass of Neuralforecast's `BaseModel` to speficy the model's parameters() for the optimizer.
\n",
" `**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
\n",
"\n",
" **References:**
\n",
@@ -352,11 +351,8 @@
" scaler_type: str = 'identity',\n",
" random_seed: int = 1,\n",
" drop_last_loader = False,\n",
- " optimizer = None,\n",
- " optimizer_kwargs = None,\n",
- " lr_scheduler = None,\n",
- " lr_scheduler_kwargs = None,\n",
" dataloader_kwargs = None,\n",
+ " config_optimizers = None,\n",
" **trainer_kwargs):\n",
"\n",
" # Inherit BaseWindows class\n",
@@ -382,11 +378,8 @@
" scaler_type=scaler_type,\n",
" drop_last_loader=drop_last_loader,\n",
" random_seed=random_seed,\n",
- " optimizer=optimizer,\n",
- " optimizer_kwargs=optimizer_kwargs,\n",
- " lr_scheduler=lr_scheduler,\n",
- " lr_scheduler_kwargs=lr_scheduler_kwargs,\n",
" dataloader_kwargs=dataloader_kwargs,\n",
+ " config_optimizers=config_optimizers,\n",
" **trainer_kwargs)\n",
"\n",
" # Architecture\n",
diff --git a/nbs/models.nlinear.ipynb b/nbs/models.nlinear.ipynb
index 97425600..5d1999ec 100644
--- a/nbs/models.nlinear.ipynb
+++ b/nbs/models.nlinear.ipynb
@@ -101,11 +101,10 @@
" `random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
\n",
" `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n",
" `alias`: str, optional, Custom name of the model.
\n",
- " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n",
- " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n",
- " `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
\n",
- " `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
\n",
" `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
\n",
+ " `config_optimizers`: , optional, A callable function that implements the optimization behavior as detailed in
\n",
+ " https://lightning.ai/docs/pytorch/stable/api/lightning.pytorch.core.LightningModule.html#lightning.pytorch.core.LightningModule.configure_optimizers
\n",
+ " Note that the function must accept an argument which is the subclass of Neuralforecast's `BaseModel` to speficy the model's parameters() for the optimizer.
\n",
" `**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
\n",
"\n",
"\t*References*
\n",
@@ -140,11 +139,8 @@
" scaler_type: str = 'identity',\n",
" random_seed: int = 1,\n",
" drop_last_loader: bool = False,\n",
- " optimizer = None,\n",
- " optimizer_kwargs = None,\n",
- " lr_scheduler = None,\n",
- " lr_scheduler_kwargs = None,\n",
" dataloader_kwargs = None,\n",
+ " config_optimizers = None,\n",
" **trainer_kwargs):\n",
" super(NLinear, self).__init__(h=h,\n",
" input_size=input_size,\n",
@@ -168,11 +164,8 @@
" scaler_type=scaler_type,\n",
" drop_last_loader=drop_last_loader,\n",
" random_seed=random_seed,\n",
- " optimizer=optimizer,\n",
- " optimizer_kwargs=optimizer_kwargs,\n",
- " lr_scheduler=lr_scheduler,\n",
- " lr_scheduler_kwargs=lr_scheduler_kwargs,\n",
" dataloader_kwargs=dataloader_kwargs,\n",
+ " config_optimizers=config_optimizers,\n",
" **trainer_kwargs)\n",
"\n",
" # Architecture\n",
diff --git a/nbs/models.patchtst.ipynb b/nbs/models.patchtst.ipynb
index b5ecd50c..1c48ac0e 100644
--- a/nbs/models.patchtst.ipynb
+++ b/nbs/models.patchtst.ipynb
@@ -661,11 +661,10 @@
" `random_seed`: int, random_seed for pytorch initializer and numpy generators.
\n",
" `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n",
" `alias`: str, optional, Custom name of the model.
\n",
- " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n",
- " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n",
- " `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
\n",
- " `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
\n",
" `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
\n",
+ " `config_optimizers`: , optional, A callable function that implements the optimization behavior as detailed in
\n",
+ " https://lightning.ai/docs/pytorch/stable/api/lightning.pytorch.core.LightningModule.html#lightning.pytorch.core.LightningModule.configure_optimizers
\n",
+ " Note that the function must accept an argument which is the subclass of Neuralforecast's `BaseModel` to speficy the model's parameters() for the optimizer.
\n",
" `**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
\n",
"\n",
" **References:**
\n",
@@ -717,11 +716,8 @@
" scaler_type: str = 'identity',\n",
" random_seed: int = 1,\n",
" drop_last_loader: bool = False,\n",
- " optimizer = None,\n",
- " optimizer_kwargs = None,\n",
- " lr_scheduler = None,\n",
- " lr_scheduler_kwargs = None,\n",
" dataloader_kwargs = None,\n",
+ " config_optimizers= None,\n",
" **trainer_kwargs):\n",
" super(PatchTST, self).__init__(h=h,\n",
" input_size=input_size,\n",
@@ -745,11 +741,8 @@
" scaler_type=scaler_type,\n",
" drop_last_loader=drop_last_loader,\n",
" random_seed=random_seed,\n",
- " optimizer=optimizer,\n",
- " optimizer_kwargs=optimizer_kwargs,\n",
- " lr_scheduler=lr_scheduler,\n",
- " lr_scheduler_kwargs=lr_scheduler_kwargs,\n",
" dataloader_kwargs=dataloader_kwargs,\n",
+ " config_optimizers=config_optimizers,\n",
" **trainer_kwargs) \n",
"\n",
" # Enforce correct patch_len, regardless of user input\n",
diff --git a/nbs/models.rmok.ipynb b/nbs/models.rmok.ipynb
index cee78191..b666cf83 100644
--- a/nbs/models.rmok.ipynb
+++ b/nbs/models.rmok.ipynb
@@ -358,11 +358,10 @@
" `random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
\n",
" `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n",
" `alias`: str, optional, Custom name of the model.
\n",
- " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n",
- " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n",
- " `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
\n",
- " `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
\n",
" `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
\n",
+ " `config_optimizers`: , optional, A callable function that implements the optimization behavior as detailed in
\n",
+ " https://lightning.ai/docs/pytorch/stable/api/lightning.pytorch.core.LightningModule.html#lightning.pytorch.core.LightningModule.configure_optimizers
\n",
+ " Note that the function must accept an argument which is the subclass of Neuralforecast's `BaseModel` to speficy the model's parameters() for the optimizer.
\n",
" `**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
\n",
"\n",
" Reference
\n",
@@ -399,11 +398,8 @@
" scaler_type: str = 'identity',\n",
" random_seed: int = 1,\n",
" drop_last_loader: bool = False,\n",
- " optimizer = None,\n",
- " optimizer_kwargs = None,\n",
- " lr_scheduler = None,\n",
- " lr_scheduler_kwargs = None,\n",
- " dataloader_kwargs = None, \n",
+ " dataloader_kwargs = None,\n",
+ " config_optimizers = None, \n",
" **trainer_kwargs):\n",
" \n",
" super(RMoK, self).__init__(h=h,\n",
@@ -424,11 +420,8 @@
" scaler_type=scaler_type,\n",
" random_seed=random_seed,\n",
" drop_last_loader=drop_last_loader,\n",
- " optimizer=optimizer,\n",
- " optimizer_kwargs=optimizer_kwargs,\n",
- " lr_scheduler=lr_scheduler,\n",
- " lr_scheduler_kwargs=lr_scheduler_kwargs,\n",
" dataloader_kwargs=dataloader_kwargs,\n",
+ " config_optimizers=config_optimizers,\n",
" **trainer_kwargs)\n",
" \n",
" self.input_size = input_size\n",
diff --git a/nbs/models.rnn.ipynb b/nbs/models.rnn.ipynb
index 6fda8dea..cfff45df 100644
--- a/nbs/models.rnn.ipynb
+++ b/nbs/models.rnn.ipynb
@@ -124,13 +124,11 @@
" `scaler_type`: str='robust', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n",
" `random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
\n",
" `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n",
- " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n",
- " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n",
- " `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
\n",
- " `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
\n",
" `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
\n",
" `alias`: str, optional, Custom name of the model.
\n",
- "\n",
+ " `config_optimizers`: , optional, A callable function that implements the optimization behavior as detailed in
\n",
+ " https://lightning.ai/docs/pytorch/stable/api/lightning.pytorch.core.LightningModule.html#lightning.pytorch.core.LightningModule.configure_optimizers
\n",
+ " Note that the function must accept an argument which is the subclass of Neuralforecast's `BaseModel` to speficy the model's parameters() for the optimizer.
\n",
" `**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
\n",
" \"\"\"\n",
" # Class attributes\n",
@@ -166,11 +164,8 @@
" scaler_type: str='robust',\n",
" random_seed=1,\n",
" drop_last_loader=False,\n",
- " optimizer=None,\n",
- " optimizer_kwargs=None,\n",
- " lr_scheduler = None,\n",
- " lr_scheduler_kwargs = None, \n",
- " dataloader_kwargs = None, \n",
+ " dataloader_kwargs = None,\n",
+ " config_optimizers = None,\n",
" **trainer_kwargs):\n",
" super(RNN, self).__init__(\n",
" h=h,\n",
@@ -191,11 +186,8 @@
" stat_exog_list=stat_exog_list,\n",
" drop_last_loader=drop_last_loader,\n",
" random_seed=random_seed,\n",
- " optimizer=optimizer,\n",
- " optimizer_kwargs=optimizer_kwargs,\n",
- " lr_scheduler=lr_scheduler,\n",
- " lr_scheduler_kwargs=lr_scheduler_kwargs,\n",
" dataloader_kwargs=dataloader_kwargs,\n",
+ " config_optimizers=config_optimizers,\n",
" **trainer_kwargs\n",
" )\n",
"\n",
diff --git a/nbs/models.softs.ipynb b/nbs/models.softs.ipynb
index 7afabeff..2f73995b 100644
--- a/nbs/models.softs.ipynb
+++ b/nbs/models.softs.ipynb
@@ -199,11 +199,10 @@
" `random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
\n",
" `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n",
" `alias`: str, optional, Custom name of the model.
\n",
- " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n",
- " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n",
- " `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
\n",
- " `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
\n",
" `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
\n",
+ " `config_optimizers`: , optional, A callable function that implements the optimization behavior as detailed in
\n",
+ " https://lightning.ai/docs/pytorch/stable/api/lightning.pytorch.core.LightningModule.html#lightning.pytorch.core.LightningModule.configure_optimizers
\n",
+ " Note that the function must accept an argument which is the subclass of Neuralforecast's `BaseModel` to speficy the model's parameters() for the optimizer.
\n",
" `**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
\n",
" \n",
" **References**
\n",
@@ -241,11 +240,8 @@
" scaler_type: str = 'identity',\n",
" random_seed: int = 1,\n",
" drop_last_loader: bool = False,\n",
- " optimizer = None,\n",
- " optimizer_kwargs = None,\n",
- " lr_scheduler = None,\n",
- " lr_scheduler_kwargs = None, \n",
- " dataloader_kwargs = None, \n",
+ " dataloader_kwargs = None,\n",
+ " config_optimizers = None,\n",
" **trainer_kwargs):\n",
" \n",
" super(SOFTS, self).__init__(h=h,\n",
@@ -266,11 +262,8 @@
" scaler_type=scaler_type,\n",
" random_seed=random_seed,\n",
" drop_last_loader=drop_last_loader,\n",
- " optimizer=optimizer,\n",
- " optimizer_kwargs=optimizer_kwargs,\n",
- " lr_scheduler=lr_scheduler,\n",
- " lr_scheduler_kwargs=lr_scheduler_kwargs,\n",
" dataloader_kwargs=dataloader_kwargs,\n",
+ " config_optimizers=config_optimizers,\n",
" **trainer_kwargs)\n",
" \n",
" self.h = h\n",
diff --git a/nbs/models.stemgnn.ipynb b/nbs/models.stemgnn.ipynb
index 357a6985..1a1edc78 100644
--- a/nbs/models.stemgnn.ipynb
+++ b/nbs/models.stemgnn.ipynb
@@ -203,11 +203,10 @@
" `random_seed`: int, random_seed for pytorch initializer and numpy generators.
\n",
" `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n",
" `alias`: str, optional, Custom name of the model.
\n",
- " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n",
- " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n",
- " `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
\n",
- " `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
\n",
" `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
\n",
+ " `config_optimizers`: , optional, A callable function that implements the optimization behavior as detailed in
\n",
+ " https://lightning.ai/docs/pytorch/stable/api/lightning.pytorch.core.LightningModule.html#lightning.pytorch.core.LightningModule.configure_optimizers
\n",
+ " Note that the function must accept an argument which is the subclass of Neuralforecast's `BaseModel` to speficy the model's parameters() for the optimizer.
\n",
" `**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
\n",
" \"\"\"\n",
" # Class attributes\n",
@@ -239,11 +238,8 @@
" scaler_type: str = 'robust',\n",
" random_seed: int = 1,\n",
" drop_last_loader = False,\n",
- " optimizer = None,\n",
- " optimizer_kwargs = None,\n",
- " lr_scheduler = None,\n",
- " lr_scheduler_kwargs = None,\n",
" dataloader_kwargs = None,\n",
+ " config_optimizers= None,\n",
" **trainer_kwargs):\n",
"\n",
" # Inherit BaseMultivariate class\n",
@@ -265,11 +261,8 @@
" scaler_type=scaler_type,\n",
" drop_last_loader=drop_last_loader,\n",
" random_seed=random_seed,\n",
- " optimizer=optimizer,\n",
- " optimizer_kwargs=optimizer_kwargs,\n",
- " lr_scheduler=lr_scheduler,\n",
- " lr_scheduler_kwargs=lr_scheduler_kwargs,\n",
" dataloader_kwargs=dataloader_kwargs,\n",
+ " config_optimizers=config_optimizers,\n",
" **trainer_kwargs)\n",
" # Quick fix for now, fix the model later.\n",
" if n_stacks != 2:\n",
diff --git a/nbs/models.tcn.ipynb b/nbs/models.tcn.ipynb
index ffa49006..06cc8c9d 100644
--- a/nbs/models.tcn.ipynb
+++ b/nbs/models.tcn.ipynb
@@ -125,11 +125,10 @@
" `random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
\n",
" `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n",
" `alias`: str, optional, Custom name of the model.
\n",
- " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n",
- " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n",
- " `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
\n",
- " `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
\n",
" `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
\n",
+ " `config_optimizers`: , optional, A callable function that implements the optimization behavior as detailed in
\n",
+ " https://lightning.ai/docs/pytorch/stable/api/lightning.pytorch.core.LightningModule.html#lightning.pytorch.core.LightningModule.configure_optimizers
\n",
+ " Note that the function must accept an argument which is the subclass of Neuralforecast's `BaseModel` to speficy the model's parameters() for the optimizer.
\n",
" `**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
\n",
" \"\"\"\n",
" # Class attributes\n",
@@ -164,11 +163,8 @@
" scaler_type: str ='robust',\n",
" random_seed: int = 1,\n",
" drop_last_loader = False,\n",
- " optimizer = None,\n",
- " optimizer_kwargs = None,\n",
- " lr_scheduler = None,\n",
- " lr_scheduler_kwargs = None, \n",
- " dataloader_kwargs = None, \n",
+ " dataloader_kwargs = None,\n",
+ " config_optimizers=None,\n",
" **trainer_kwargs):\n",
" super(TCN, self).__init__(\n",
" h=h,\n",
@@ -189,11 +185,8 @@
" stat_exog_list=stat_exog_list,\n",
" drop_last_loader=drop_last_loader,\n",
" random_seed=random_seed,\n",
- " optimizer=optimizer,\n",
- " optimizer_kwargs=optimizer_kwargs,\n",
- " lr_scheduler=lr_scheduler,\n",
- " lr_scheduler_kwargs=lr_scheduler_kwargs,\n",
" dataloader_kwargs = dataloader_kwargs,\n",
+ " config_optimizers = config_optimizers,\n",
" **trainer_kwargs\n",
" )\n",
"\n",
diff --git a/nbs/models.tft.ipynb b/nbs/models.tft.ipynb
index 6e313bb1..1a5bd339 100644
--- a/nbs/models.tft.ipynb
+++ b/nbs/models.tft.ipynb
@@ -695,11 +695,10 @@
" `random_seed`: int, random seed initialization for replicability.
\n",
" `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n",
" `alias`: str, optional, Custom name of the model.
\n",
- " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n",
- " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n",
- " `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
\n",
- " `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
\n",
" `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
\n",
+ " `config_optimizers`: , optional, A callable function that implements the optimization behavior as detailed in
\n",
+ " https://lightning.ai/docs/pytorch/stable/api/lightning.pytorch.core.LightningModule.html#lightning.pytorch.core.LightningModule.configure_optimizers
\n",
+ " Note that the function must accept an argument which is the subclass of Neuralforecast's `BaseModel` to speficy the model's parameters() for the optimizer.
\n",
" `**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
\n",
"\n",
" **References:**
\n",
@@ -742,11 +741,8 @@
" scaler_type: str = \"robust\",\n",
" drop_last_loader=False,\n",
" random_seed: int = 1,\n",
- " optimizer=None,\n",
- " optimizer_kwargs=None,\n",
- " lr_scheduler=None,\n",
- " lr_scheduler_kwargs=None,\n",
" dataloader_kwargs = None,\n",
+ " config_optimizers = None,\n",
" **trainer_kwargs,\n",
" ):\n",
"\n",
@@ -773,11 +769,8 @@
" scaler_type=scaler_type,\n",
" drop_last_loader=drop_last_loader,\n",
" random_seed=random_seed,\n",
- " optimizer=optimizer,\n",
- " optimizer_kwargs=optimizer_kwargs,\n",
- " lr_scheduler=lr_scheduler,\n",
- " lr_scheduler_kwargs=lr_scheduler_kwargs,\n",
" dataloader_kwargs=dataloader_kwargs,\n",
+ " config_optimizers=config_optimizers,\n",
" **trainer_kwargs,\n",
" )\n",
" self.example_length = input_size + h\n",
diff --git a/nbs/models.tide.ipynb b/nbs/models.tide.ipynb
index 3b096a26..49a0643b 100644
--- a/nbs/models.tide.ipynb
+++ b/nbs/models.tide.ipynb
@@ -166,11 +166,10 @@
" `random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
\n",
" `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n",
" `alias`: str, optional, Custom name of the model.
\n",
- " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n",
- " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n",
- " `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
\n",
- " `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
\n",
" `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
\n",
+ " `config_optimizers`: , optional, A callable function that implements the optimization behavior as detailed in
\n",
+ " https://lightning.ai/docs/pytorch/stable/api/lightning.pytorch.core.LightningModule.html#lightning.pytorch.core.LightningModule.configure_optimizers
\n",
+ " Note that the function must accept an argument which is the subclass of Neuralforecast's `BaseModel` to speficy the model's parameters() for the optimizer.
\n",
" `**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
\n",
"\n",
" **References:**
\n",
@@ -214,11 +213,8 @@
" scaler_type: str = 'identity',\n",
" random_seed: int = 1,\n",
" drop_last_loader: bool = False,\n",
- " optimizer = None,\n",
- " optimizer_kwargs = None,\n",
- " lr_scheduler = None,\n",
- " lr_scheduler_kwargs = None,\n",
" dataloader_kwargs = None,\n",
+ " config_optimizers = None,\n",
" **trainer_kwargs):\n",
"\n",
" # Inherit BaseWindows class\n",
@@ -245,11 +241,8 @@
" scaler_type=scaler_type,\n",
" random_seed=random_seed,\n",
" drop_last_loader=drop_last_loader,\n",
- " optimizer=optimizer,\n",
- " optimizer_kwargs=optimizer_kwargs,\n",
- " lr_scheduler=lr_scheduler,\n",
- " lr_scheduler_kwargs=lr_scheduler_kwargs,\n",
" dataloader_kwargs=dataloader_kwargs,\n",
+ " config_optimizers=config_optimizers,\n",
" **trainer_kwargs\n",
" ) \n",
" self.h = h\n",
diff --git a/nbs/models.timellm.ipynb b/nbs/models.timellm.ipynb
index 81251588..b8a17b19 100755
--- a/nbs/models.timellm.ipynb
+++ b/nbs/models.timellm.ipynb
@@ -290,11 +290,10 @@
" `random_seed`: int, random_seed for pytorch initializer and numpy generators.
\n",
" `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n",
" `alias`: str, optional, Custom name of the model.
\n",
- " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n",
- " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n",
- " `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
\n",
- " `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
\n",
" `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
\n",
+ " `config_optimizers`: , optional, A callable function that implements the optimization behavior as detailed in
\n",
+ " https://lightning.ai/docs/pytorch/stable/api/lightning.pytorch.core.LightningModule.html#lightning.pytorch.core.LightningModule.configure_optimizers
\n",
+ " Note that the function must accept an argument which is the subclass of Neuralforecast's `BaseModel` to speficy the model's parameters() for the optimizer.
\n",
" `**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
\n",
"\n",
" **References:**
\n",
@@ -346,11 +345,8 @@
" scaler_type: str = 'identity',\n",
" drop_last_loader: bool = False,\n",
" random_seed: int = 1,\n",
- " optimizer = None,\n",
- " optimizer_kwargs = None,\n",
- " lr_scheduler = None,\n",
- " lr_scheduler_kwargs = None,\n",
" dataloader_kwargs = None,\n",
+ " config_optimizers = None,\n",
" **trainer_kwargs):\n",
" super(TimeLLM, self).__init__(h=h,\n",
" input_size=input_size,\n",
@@ -373,11 +369,8 @@
" scaler_type=scaler_type,\n",
" drop_last_loader=drop_last_loader,\n",
" random_seed=random_seed,\n",
- " optimizer=optimizer,\n",
- " optimizer_kwargs=optimizer_kwargs,\n",
- " lr_scheduler=lr_scheduler,\n",
- " lr_scheduler_kwargs=lr_scheduler_kwargs,\n",
" dataloader_kwargs=dataloader_kwargs,\n",
+ " config_optimizers=config_optimizers,\n",
" **trainer_kwargs)\n",
" \n",
" # Architecture\n",
diff --git a/nbs/models.timemixer.ipynb b/nbs/models.timemixer.ipynb
index 1f405c26..5ee98d4f 100644
--- a/nbs/models.timemixer.ipynb
+++ b/nbs/models.timemixer.ipynb
@@ -359,11 +359,10 @@
" `random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
\n",
" `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n",
" `alias`: str, optional, Custom name of the model.
\n",
- " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n",
- " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n",
- " `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
\n",
- " `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
\n",
" `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
\n",
+ " `config_optimizers`: , optional, A callable function that implements the optimization behavior as detailed in
\n",
+ " https://lightning.ai/docs/pytorch/stable/api/lightning.pytorch.core.LightningModule.html#lightning.pytorch.core.LightningModule.configure_optimizers
\n",
+ " Note that the function must accept an argument which is the subclass of Neuralforecast's `BaseModel` to speficy the model's parameters() for the optimizer.
\n",
" `**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
\n",
"\n",
" **References**
\n",
@@ -408,11 +407,8 @@
" scaler_type: str = 'identity',\n",
" random_seed: int = 1,\n",
" drop_last_loader: bool = False,\n",
- " optimizer = None,\n",
- " optimizer_kwargs = None,\n",
- " lr_scheduler = None,\n",
- " lr_scheduler_kwargs = None, \n",
- " dataloader_kwargs = None, \n",
+ " dataloader_kwargs = None,\n",
+ " config_optimizers = None,\n",
" **trainer_kwargs):\n",
" \n",
" super(TimeMixer, self).__init__(h=h,\n",
@@ -433,11 +429,8 @@
" scaler_type=scaler_type,\n",
" random_seed=random_seed,\n",
" drop_last_loader=drop_last_loader,\n",
- " optimizer=optimizer,\n",
- " optimizer_kwargs=optimizer_kwargs,\n",
- " lr_scheduler=lr_scheduler,\n",
- " lr_scheduler_kwargs=lr_scheduler_kwargs,\n",
" dataloader_kwargs=dataloader_kwargs,\n",
+ " config_optimizers=config_optimizers,\n",
" **trainer_kwargs)\n",
" \n",
" self.label_len = int(np.ceil(input_size * decoder_input_size_multiplier))\n",
diff --git a/nbs/models.timesnet.ipynb b/nbs/models.timesnet.ipynb
index bc85e712..fadfb1c3 100644
--- a/nbs/models.timesnet.ipynb
+++ b/nbs/models.timesnet.ipynb
@@ -261,14 +261,11 @@
" Random_seed for pytorch initializer and numpy generators.\n",
" drop_last_loader : bool (default=False)\n",
" If True `TimeSeriesDataLoader` drops last non-full batch.\n",
- " `optimizer`: Subclass of 'torch.optim.Optimizer', optional (default=None)\n",
- " User specified optimizer instead of the default choice (Adam).\n",
- " `optimizer_kwargs`: dict, optional (defualt=None)\n",
- " List of parameters used by the user specified `optimizer`.\n",
- " `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
\n",
- " `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
\n",
" `dataloader_kwargs`: dict, optional (default=None)\n",
" List of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
\n",
+ " `config_optimizers`: , optional, A callable function that implements the optimization behavior as detailed in
\n",
+ " https://lightning.ai/docs/pytorch/stable/api/lightning.pytorch.core.LightningModule.html#lightning.pytorch.core.LightningModule.configure_optimizers
\n",
+ " Note that the function must accept an argument which is the subclass of Neuralforecast's `BaseModel` to speficy the model's parameters() for the optimizer.
\n",
" **trainer_kwargs\n",
" Keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer)\n",
"\n",
@@ -311,11 +308,8 @@
" scaler_type: str = 'standard',\n",
" random_seed: int = 1,\n",
" drop_last_loader: bool = False,\n",
- " optimizer = None,\n",
- " optimizer_kwargs = None,\n",
- " lr_scheduler = None,\n",
- " lr_scheduler_kwargs = None, \n",
- " dataloader_kwargs = None, \n",
+ " dataloader_kwargs = None,\n",
+ " config_optimizers = None,\n",
" **trainer_kwargs):\n",
" super(TimesNet, self).__init__(h=h,\n",
" input_size=input_size,\n",
@@ -339,11 +333,8 @@
" scaler_type=scaler_type,\n",
" drop_last_loader=drop_last_loader,\n",
" random_seed=random_seed,\n",
- " optimizer=optimizer,\n",
- " optimizer_kwargs=optimizer_kwargs,\n",
- " lr_scheduler=lr_scheduler,\n",
- " lr_scheduler_kwargs=lr_scheduler_kwargs, \n",
- " dataloader_kwargs=dataloader_kwargs, \n",
+ " dataloader_kwargs=dataloader_kwargs,\n",
+ " config_optimizers=config_optimizers,\n",
" **trainer_kwargs)\n",
"\n",
" # Architecture\n",
diff --git a/nbs/models.tsmixer.ipynb b/nbs/models.tsmixer.ipynb
index 55080cad..bef03803 100644
--- a/nbs/models.tsmixer.ipynb
+++ b/nbs/models.tsmixer.ipynb
@@ -249,11 +249,10 @@
" `random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
\n",
" `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n",
" `alias`: str, optional, Custom name of the model.
\n",
- " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n",
- " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n",
- " `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
\n",
- " `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
\n",
" `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
\n",
+ " `config_optimizers`: , optional, A callable function that implements the optimization behavior as detailed in
\n",
+ " https://lightning.ai/docs/pytorch/stable/api/lightning.pytorch.core.LightningModule.html#lightning.pytorch.core.LightningModule.configure_optimizers
\n",
+ " Note that the function must accept an argument which is the subclass of Neuralforecast's `BaseModel` to speficy the model's parameters() for the optimizer.
\n",
" `**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
\n",
"\n",
" **References:**
\n",
@@ -289,11 +288,8 @@
" scaler_type: str = 'identity',\n",
" random_seed: int = 1,\n",
" drop_last_loader: bool = False,\n",
- " optimizer = None,\n",
- " optimizer_kwargs = None,\n",
- " lr_scheduler = None,\n",
- " lr_scheduler_kwargs = None,\n",
" dataloader_kwargs = None,\n",
+ " config_optimizers = None,\n",
" **trainer_kwargs):\n",
"\n",
" # Inherit BaseMultivariate class\n",
@@ -315,11 +311,8 @@
" scaler_type=scaler_type,\n",
" random_seed=random_seed,\n",
" drop_last_loader=drop_last_loader,\n",
- " optimizer=optimizer,\n",
- " optimizer_kwargs=optimizer_kwargs,\n",
- " lr_scheduler=lr_scheduler,\n",
- " lr_scheduler_kwargs=lr_scheduler_kwargs,\n",
" dataloader_kwargs=dataloader_kwargs,\n",
+ " config_optimizers=config_optimizers,\n",
" **trainer_kwargs)\n",
"\n",
" # Reversible InstanceNormalization layer\n",
diff --git a/nbs/models.tsmixerx.ipynb b/nbs/models.tsmixerx.ipynb
index 74ba735e..4916129a 100644
--- a/nbs/models.tsmixerx.ipynb
+++ b/nbs/models.tsmixerx.ipynb
@@ -273,11 +273,10 @@
" `random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
\n",
" `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n",
" `alias`: str, optional, Custom name of the model.
\n",
- " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n",
- " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n",
- " `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
\n",
- " `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
\n",
" `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
\n",
+ " `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
\n",
+ " `config_optimizers`: , optional, A callable function that implements the optimization behavior as detailed in
\n",
+ " https://lightning.ai/docs/pytorch/stable/api/lightning.pytorch.core.LightningModule.html#lightning.pytorch.core.LightningModule.configure_optimizers
\n",
" `**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
\n",
"\n",
" **References:**
\n",
@@ -313,11 +312,8 @@
" scaler_type: str = 'identity',\n",
" random_seed: int = 1,\n",
" drop_last_loader: bool = False,\n",
- " optimizer = None,\n",
- " optimizer_kwargs = None,\n",
- " lr_scheduler = None,\n",
- " lr_scheduler_kwargs = None,\n",
" dataloader_kwargs = None,\n",
+ " config_optimizers = None,\n",
" **trainer_kwargs):\n",
"\n",
" # Inherit BaseMultvariate class\n",
@@ -339,11 +335,8 @@
" scaler_type=scaler_type,\n",
" random_seed=random_seed,\n",
" drop_last_loader=drop_last_loader,\n",
- " optimizer=optimizer,\n",
- " optimizer_kwargs=optimizer_kwargs,\n",
- " lr_scheduler=lr_scheduler,\n",
- " lr_scheduler_kwargs=lr_scheduler_kwargs,\n",
" dataloader_kwargs=dataloader_kwargs,\n",
+ " config_optimizers=config_optimizers,\n",
" **trainer_kwargs)\n",
" # Reversible InstanceNormalization layer\n",
" self.revin = revin\n",
diff --git a/nbs/models.vanillatransformer.ipynb b/nbs/models.vanillatransformer.ipynb
index 232de7df..78ef923b 100644
--- a/nbs/models.vanillatransformer.ipynb
+++ b/nbs/models.vanillatransformer.ipynb
@@ -197,11 +197,10 @@
" `random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
\n",
" `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n",
" `alias`: str, optional, Custom name of the model.
\n",
- " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n",
- " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n",
- " `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
\n",
- " `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
\n",
" `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
\n",
+ " `config_optimizers`: , optional, A callable function that implements the optimization behavior as detailed in
\n",
+ " https://lightning.ai/docs/pytorch/stable/api/lightning.pytorch.core.LightningModule.html#lightning.pytorch.core.LightningModule.configure_optimizers
\n",
+ " Note that the function must accept an argument which is the subclass of Neuralforecast's `BaseModel` to speficy the model's parameters() for the optimizer.
\n",
" `**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
\n",
"\n",
"\t*References*
\n",
@@ -243,11 +242,8 @@
" scaler_type: str = 'identity',\n",
" random_seed: int = 1,\n",
" drop_last_loader: bool = False,\n",
- " optimizer = None,\n",
- " optimizer_kwargs = None,\n",
- " lr_scheduler = None,\n",
- " lr_scheduler_kwargs = None,\n",
" dataloader_kwargs = None,\n",
+ " config_optimizers = None,\n",
" **trainer_kwargs):\n",
" super(VanillaTransformer, self).__init__(h=h,\n",
" input_size=input_size,\n",
@@ -270,11 +266,8 @@
" scaler_type=scaler_type,\n",
" drop_last_loader=drop_last_loader,\n",
" random_seed=random_seed,\n",
- " optimizer=optimizer,\n",
- " optimizer_kwargs=optimizer_kwargs,\n",
- " lr_scheduler=lr_scheduler,\n",
- " lr_scheduler_kwargs=lr_scheduler_kwargs,\n",
" dataloader_kwargs=dataloader_kwargs,\n",
+ " config_optimizers=config_optimizers,\n",
" **trainer_kwargs)\n",
"\n",
" # Architecture\n",
diff --git a/neuralforecast/common/_base_model.py b/neuralforecast/common/_base_model.py
index 6facedff..bb001e2b 100644
--- a/neuralforecast/common/_base_model.py
+++ b/neuralforecast/common/_base_model.py
@@ -8,7 +8,6 @@
import random
import warnings
from contextlib import contextmanager
-from copy import deepcopy
from dataclasses import dataclass
import fsspec
@@ -72,15 +71,12 @@ def __init__(
random_seed,
loss,
valid_loss,
- optimizer,
- optimizer_kwargs,
- lr_scheduler,
- lr_scheduler_kwargs,
futr_exog_list,
hist_exog_list,
stat_exog_list,
max_steps,
early_stop_patience_steps,
+ config_optimizers=None,
**trainer_kwargs,
):
super().__init__()
@@ -101,25 +97,8 @@ def __init__(
self.train_trajectories = []
self.valid_trajectories = []
- # Optimization
- if optimizer is not None and not issubclass(optimizer, torch.optim.Optimizer):
- raise TypeError(
- "optimizer is not a valid subclass of torch.optim.Optimizer"
- )
- self.optimizer = optimizer
- self.optimizer_kwargs = optimizer_kwargs if optimizer_kwargs is not None else {}
-
- # lr scheduler
- if lr_scheduler is not None and not issubclass(
- lr_scheduler, torch.optim.lr_scheduler.LRScheduler
- ):
- raise TypeError(
- "lr_scheduler is not a valid subclass of torch.optim.lr_scheduler.LRScheduler"
- )
- self.lr_scheduler = lr_scheduler
- self.lr_scheduler_kwargs = (
- lr_scheduler_kwargs if lr_scheduler_kwargs is not None else {}
- )
+ # function has the same signature as LightningModule's configure_optimizers
+ self.config_optimizers = config_optimizers
# Variables
self.futr_exog_list = list(futr_exog_list) if futr_exog_list is not None else []
@@ -375,45 +354,20 @@ def on_fit_start(self):
random.seed(self.random_seed)
def configure_optimizers(self):
- if self.optimizer:
- optimizer_signature = inspect.signature(self.optimizer)
- optimizer_kwargs = deepcopy(self.optimizer_kwargs)
- if "lr" in optimizer_signature.parameters:
- if "lr" in optimizer_kwargs:
- warnings.warn(
- "ignoring learning rate passed in optimizer_kwargs, using the model's learning rate"
- )
- optimizer_kwargs["lr"] = self.learning_rate
- optimizer = self.optimizer(params=self.parameters(), **optimizer_kwargs)
- else:
- if self.optimizer_kwargs:
- warnings.warn(
- "ignoring optimizer_kwargs as the optimizer is not specified"
- )
- optimizer = torch.optim.Adam(self.parameters(), lr=self.learning_rate)
-
- lr_scheduler = {"frequency": 1, "interval": "step"}
- if self.lr_scheduler:
- lr_scheduler_signature = inspect.signature(self.lr_scheduler)
- lr_scheduler_kwargs = deepcopy(self.lr_scheduler_kwargs)
- if "optimizer" in lr_scheduler_signature.parameters:
- if "optimizer" in lr_scheduler_kwargs:
- warnings.warn(
- "ignoring optimizer passed in lr_scheduler_kwargs, using the model's optimizer"
- )
- del lr_scheduler_kwargs["optimizer"]
- lr_scheduler["scheduler"] = self.lr_scheduler(
- optimizer=optimizer, **lr_scheduler_kwargs
- )
- else:
- if self.lr_scheduler_kwargs:
- warnings.warn(
- "ignoring lr_scheduler_kwargs as the lr_scheduler is not specified"
- )
- lr_scheduler["scheduler"] = torch.optim.lr_scheduler.StepLR(
+ if self.config_optimizers is not None:
+ # return the customized optimizer settings if specified
+ return self.config_optimizers(self)
+
+ # default choice
+ optimizer = torch.optim.Adam(self.parameters(), lr=self.learning_rate)
+ scheduler = {
+ "scheduler": torch.optim.lr_scheduler.StepLR(
optimizer=optimizer, step_size=self.lr_decay_steps, gamma=0.5
- )
- return {"optimizer": optimizer, "lr_scheduler": lr_scheduler}
+ ),
+ "frequency": 1,
+ "interval": "step",
+ }
+ return {"optimizer": optimizer, "lr_scheduler": scheduler}
def get_test_size(self):
return self.test_size
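
Because `configure_optimizers` now simply returns `self.config_optimizers(self)` when the hook is set, the callable may return anything Lightning's `configure_optimizers` accepts. A minimal sketch (illustrative, not part of the patch) that returns only an optimizer:

```python
import torch

def optimizer_only(model):
    # A bare optimizer is also a valid configure_optimizers return value;
    # in that case no lr scheduler is attached.
    return torch.optim.SGD(model.parameters(), lr=0.05, momentum=0.9)
```

Passing `config_optimizers=optimizer_only` to any of the model constructors touched by this patch uses this optimizer; leaving the argument as `None` keeps the default Adam plus StepLR path shown above.
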
diff --git a/neuralforecast/common/_base_multivariate.py b/neuralforecast/common/_base_multivariate.py
index a1f8a51a..c858ee8c 100644
--- a/neuralforecast/common/_base_multivariate.py
+++ b/neuralforecast/common/_base_multivariate.py
@@ -49,26 +49,20 @@ def __init__(
drop_last_loader=False,
random_seed=1,
alias=None,
- optimizer=None,
- optimizer_kwargs=None,
- lr_scheduler=None,
- lr_scheduler_kwargs=None,
dataloader_kwargs=None,
+ config_optimizers=None,
**trainer_kwargs,
):
super().__init__(
random_seed=random_seed,
loss=loss,
valid_loss=valid_loss,
- optimizer=optimizer,
- optimizer_kwargs=optimizer_kwargs,
- lr_scheduler=lr_scheduler,
- lr_scheduler_kwargs=lr_scheduler_kwargs,
futr_exog_list=futr_exog_list,
hist_exog_list=hist_exog_list,
stat_exog_list=stat_exog_list,
max_steps=max_steps,
early_stop_patience_steps=early_stop_patience_steps,
+ config_optimizers=config_optimizers,
**trainer_kwargs,
)
diff --git a/neuralforecast/common/_base_recurrent.py b/neuralforecast/common/_base_recurrent.py
index abb66a0b..808bca62 100644
--- a/neuralforecast/common/_base_recurrent.py
+++ b/neuralforecast/common/_base_recurrent.py
@@ -49,26 +49,20 @@ def __init__(
drop_last_loader=False,
random_seed=1,
alias=None,
- optimizer=None,
- optimizer_kwargs=None,
- lr_scheduler=None,
- lr_scheduler_kwargs=None,
dataloader_kwargs=None,
+ config_optimizers=None,
**trainer_kwargs,
):
super().__init__(
random_seed=random_seed,
loss=loss,
valid_loss=valid_loss,
- optimizer=optimizer,
- optimizer_kwargs=optimizer_kwargs,
- lr_scheduler=lr_scheduler,
- lr_scheduler_kwargs=lr_scheduler_kwargs,
futr_exog_list=futr_exog_list,
hist_exog_list=hist_exog_list,
stat_exog_list=stat_exog_list,
max_steps=max_steps,
early_stop_patience_steps=early_stop_patience_steps,
+ config_optimizers=config_optimizers,
**trainer_kwargs,
)
diff --git a/neuralforecast/common/_base_windows.py b/neuralforecast/common/_base_windows.py
index 74a81b95..4046ebca 100644
--- a/neuralforecast/common/_base_windows.py
+++ b/neuralforecast/common/_base_windows.py
@@ -52,26 +52,20 @@ def __init__(
drop_last_loader=False,
random_seed=1,
alias=None,
- optimizer=None,
- optimizer_kwargs=None,
- lr_scheduler=None,
- lr_scheduler_kwargs=None,
dataloader_kwargs=None,
+ config_optimizers=None,
**trainer_kwargs,
):
super().__init__(
random_seed=random_seed,
loss=loss,
valid_loss=valid_loss,
- optimizer=optimizer,
- optimizer_kwargs=optimizer_kwargs,
- lr_scheduler=lr_scheduler,
- lr_scheduler_kwargs=lr_scheduler_kwargs,
futr_exog_list=futr_exog_list,
hist_exog_list=hist_exog_list,
stat_exog_list=stat_exog_list,
max_steps=max_steps,
early_stop_patience_steps=early_stop_patience_steps,
+ config_optimizers=config_optimizers,
**trainer_kwargs,
)
diff --git a/neuralforecast/models/autoformer.py b/neuralforecast/models/autoformer.py
index 069e3641..6deeb800 100644
--- a/neuralforecast/models/autoformer.py
+++ b/neuralforecast/models/autoformer.py
@@ -441,12 +441,11 @@ class Autoformer(BaseWindows):
`random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
`alias`: str, optional, Custom name of the model.
- `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
- `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
- `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
- `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
`dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
- `**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
+ `config_optimizers`: Callable, optional, a callable that implements the optimization behavior as detailed in
+ https://lightning.ai/docs/pytorch/stable/api/lightning.pytorch.core.LightningModule.html#lightning.pytorch.core.LightningModule.configure_optimizers
+ Note that the callable must accept a single argument (a subclass of Neuralforecast's `BaseModel`) so it can use the model's parameters() when building the optimizer.
+ `**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
*References*
- [Wu, Haixu, Jiehui Xu, Jianmin Wang, and Mingsheng Long. "Autoformer: Decomposition transformers with auto-correlation for long-term series forecasting"](https://proceedings.neurips.cc/paper/2021/hash/bcc0d400288793e8bdcd7c19a8ac0c2b-Abstract.html)
@@ -492,11 +491,8 @@ def __init__(
scaler_type: str = "identity",
random_seed: int = 1,
drop_last_loader: bool = False,
- optimizer=None,
- optimizer_kwargs=None,
- lr_scheduler=None,
- lr_scheduler_kwargs=None,
dataloader_kwargs=None,
+ config_optimizers=None,
**trainer_kwargs,
):
super(Autoformer, self).__init__(
@@ -522,11 +518,8 @@ def __init__(
scaler_type=scaler_type,
drop_last_loader=drop_last_loader,
random_seed=random_seed,
- optimizer=optimizer,
- optimizer_kwargs=optimizer_kwargs,
- lr_scheduler=lr_scheduler,
- lr_scheduler_kwargs=lr_scheduler_kwargs,
dataloader_kwargs=dataloader_kwargs,
+ config_optimizers=config_optimizers,
**trainer_kwargs,
)
diff --git a/neuralforecast/models/bitcn.py b/neuralforecast/models/bitcn.py
index cf4fc91d..a631164c 100644
--- a/neuralforecast/models/bitcn.py
+++ b/neuralforecast/models/bitcn.py
@@ -115,11 +115,10 @@ class BiTCN(BaseWindows):
`random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
`alias`: str, optional, Custom name of the model.
- `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
- `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
- `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
- `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
`dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
+ `config_optimizers`: Callable, optional, a callable that implements the optimization behavior as detailed in
+ https://lightning.ai/docs/pytorch/stable/api/lightning.pytorch.core.LightningModule.html#lightning.pytorch.core.LightningModule.configure_optimizers
+ Note that the function must accept a single argument, the model instance (a subclass of NeuralForecast's `BaseModel`), which is used to specify the model's `parameters()` for the optimizer.
`**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
**References**
@@ -159,11 +158,8 @@ def __init__(
scaler_type: str = "identity",
random_seed: int = 1,
drop_last_loader: bool = False,
- optimizer=None,
- optimizer_kwargs=None,
- lr_scheduler=None,
- lr_scheduler_kwargs=None,
dataloader_kwargs=None,
+ config_optimizers=None,
**trainer_kwargs
):
super(BiTCN, self).__init__(
@@ -189,11 +185,8 @@ def __init__(
scaler_type=scaler_type,
random_seed=random_seed,
drop_last_loader=drop_last_loader,
- optimizer=optimizer,
- optimizer_kwargs=optimizer_kwargs,
- lr_scheduler=lr_scheduler,
- lr_scheduler_kwargs=lr_scheduler_kwargs,
dataloader_kwargs=dataloader_kwargs,
+ config_optimizers=config_optimizers,
**trainer_kwargs
)
diff --git a/neuralforecast/models/deepar.py b/neuralforecast/models/deepar.py
index 6b16f51d..eedcc482 100644
--- a/neuralforecast/models/deepar.py
+++ b/neuralforecast/models/deepar.py
@@ -86,11 +86,10 @@ class DeepAR(BaseWindows):
`random_seed`: int, random_seed for pytorch initializer and numpy generators.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
`alias`: str, optional, Custom name of the model.
- `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
- `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
- `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
- `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
`dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
+ `config_optimizers`: Callable, optional, a callable that implements the optimization behavior as detailed in
+ https://lightning.ai/docs/pytorch/stable/api/lightning.pytorch.core.LightningModule.html#lightning.pytorch.core.LightningModule.configure_optimizers
+ Note that the function must accept a single argument, the model instance (a subclass of NeuralForecast's `BaseModel`), which is used to specify the model's `parameters()` for the optimizer.
`**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
**References**
@@ -137,11 +136,8 @@ def __init__(
scaler_type: str = "identity",
random_seed: int = 1,
drop_last_loader=False,
- optimizer=None,
- optimizer_kwargs=None,
- lr_scheduler=None,
- lr_scheduler_kwargs=None,
dataloader_kwargs=None,
+ config_optimizers=None,
**trainer_kwargs
):
@@ -185,11 +181,8 @@ def __init__(
scaler_type=scaler_type,
drop_last_loader=drop_last_loader,
random_seed=random_seed,
- optimizer=optimizer,
- optimizer_kwargs=optimizer_kwargs,
- lr_scheduler=lr_scheduler,
- lr_scheduler_kwargs=lr_scheduler_kwargs,
dataloader_kwargs=dataloader_kwargs,
+ config_optimizers=config_optimizers,
**trainer_kwargs
)
diff --git a/neuralforecast/models/deepnpts.py b/neuralforecast/models/deepnpts.py
index 3edeb059..77fd2cc2 100644
--- a/neuralforecast/models/deepnpts.py
+++ b/neuralforecast/models/deepnpts.py
@@ -48,11 +48,10 @@ class DeepNPTS(BaseWindows):
`random_seed`: int, random_seed for pytorch initializer and numpy generators.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
`alias`: str, optional, Custom name of the model.
- `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
- `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
- `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
- `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
`dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
+ `config_optimizers`: Callable, optional, a callable that implements the optimization behavior as detailed in
+ https://lightning.ai/docs/pytorch/stable/api/lightning.pytorch.core.LightningModule.html#lightning.pytorch.core.LightningModule.configure_optimizers
+ Note that the function must accept a single argument, the model instance (a subclass of NeuralForecast's `BaseModel`), which is used to specify the model's `parameters()` for the optimizer.
`**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
**References**
@@ -94,11 +93,8 @@ def __init__(
scaler_type: str = "standard",
random_seed: int = 1,
drop_last_loader=False,
- optimizer=None,
- optimizer_kwargs=None,
- lr_scheduler=None,
- lr_scheduler_kwargs=None,
dataloader_kwargs=None,
+ config_optimizers=None,
**trainer_kwargs
):
@@ -139,11 +135,8 @@ def __init__(
scaler_type=scaler_type,
drop_last_loader=drop_last_loader,
random_seed=random_seed,
- optimizer=optimizer,
- optimizer_kwargs=optimizer_kwargs,
- lr_scheduler=lr_scheduler,
- lr_scheduler_kwargs=lr_scheduler_kwargs,
dataloader_kwargs=dataloader_kwargs,
+ config_optimizers=config_optimizers,
**trainer_kwargs
)
diff --git a/neuralforecast/models/dilated_rnn.py b/neuralforecast/models/dilated_rnn.py
index 96094c96..c2c5a299 100644
--- a/neuralforecast/models/dilated_rnn.py
+++ b/neuralforecast/models/dilated_rnn.py
@@ -316,11 +316,10 @@ class DilatedRNN(BaseRecurrent):
`random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
`alias`: str, optional, Custom name of the model.
- `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
- `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
- `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
- `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
`dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
+ `config_optimizers`: Callable, optional, a callable that implements the optimization behavior as detailed in
+ https://lightning.ai/docs/pytorch/stable/api/lightning.pytorch.core.LightningModule.html#lightning.pytorch.core.LightningModule.configure_optimizers
+ Note that the function must accept a single argument, the model instance (a subclass of NeuralForecast's `BaseModel`), which is used to specify the model's `parameters()` for the optimizer.
`**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
"""
@@ -357,11 +356,8 @@ def __init__(
scaler_type: str = "robust",
random_seed: int = 1,
drop_last_loader: bool = False,
- optimizer=None,
- optimizer_kwargs=None,
- lr_scheduler=None,
- lr_scheduler_kwargs=None,
dataloader_kwargs=None,
+ config_optimizers=None,
**trainer_kwargs
):
super(DilatedRNN, self).__init__(
@@ -383,11 +379,8 @@ def __init__(
stat_exog_list=stat_exog_list,
drop_last_loader=drop_last_loader,
random_seed=random_seed,
- optimizer=optimizer,
- optimizer_kwargs=optimizer_kwargs,
- lr_scheduler=lr_scheduler,
- lr_scheduler_kwargs=lr_scheduler_kwargs,
dataloader_kwargs=dataloader_kwargs,
+ config_optimizers=config_optimizers,
**trainer_kwargs
)
diff --git a/neuralforecast/models/dlinear.py b/neuralforecast/models/dlinear.py
index 3af5f11c..8a0c5892 100644
--- a/neuralforecast/models/dlinear.py
+++ b/neuralforecast/models/dlinear.py
@@ -74,11 +74,10 @@ class DLinear(BaseWindows):
`random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
`alias`: str, optional, Custom name of the model.
- `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
- `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
- `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
- `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
`dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
+ `config_optimizers`: Callable, optional, a callable that implements the optimization behavior as detailed in
+ https://lightning.ai/docs/pytorch/stable/api/lightning.pytorch.core.LightningModule.html#lightning.pytorch.core.LightningModule.configure_optimizers
+ Note that the function must accept a single argument, the model instance (a subclass of NeuralForecast's `BaseModel`), which is used to specify the model's `parameters()` for the optimizer.
`**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
*References*
@@ -116,11 +115,8 @@ def __init__(
scaler_type: str = "identity",
random_seed: int = 1,
drop_last_loader: bool = False,
- optimizer=None,
- optimizer_kwargs=None,
- lr_scheduler=None,
- lr_scheduler_kwargs=None,
dataloader_kwargs=None,
+ config_optimizers=None,
**trainer_kwargs
):
super(DLinear, self).__init__(
@@ -146,11 +142,8 @@ def __init__(
scaler_type=scaler_type,
drop_last_loader=drop_last_loader,
random_seed=random_seed,
- optimizer=optimizer,
- optimizer_kwargs=optimizer_kwargs,
- lr_scheduler=lr_scheduler,
- lr_scheduler_kwargs=lr_scheduler_kwargs,
dataloader_kwargs=dataloader_kwargs,
+ config_optimizers=config_optimizers,
**trainer_kwargs
)
diff --git a/neuralforecast/models/fedformer.py b/neuralforecast/models/fedformer.py
index 7cfe3c5a..8620d9df 100644
--- a/neuralforecast/models/fedformer.py
+++ b/neuralforecast/models/fedformer.py
@@ -439,11 +439,10 @@ class FEDformer(BaseWindows):
`random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
`alias`: str, optional, Custom name of the model.
- `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
- `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
- `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
- `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
`dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
+ `config_optimizers`: Callable, optional, a callable that implements the optimization behavior as detailed in
+ https://lightning.ai/docs/pytorch/stable/api/lightning.pytorch.core.LightningModule.html#lightning.pytorch.core.LightningModule.configure_optimizers
+ Note that the function must accept a single argument, the model instance (a subclass of NeuralForecast's `BaseModel`), which is used to specify the model's `parameters()` for the optimizer.
`**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
"""
@@ -489,11 +488,8 @@ def __init__(
scaler_type: str = "identity",
random_seed: int = 1,
drop_last_loader: bool = False,
- optimizer=None,
- optimizer_kwargs=None,
- lr_scheduler=None,
- lr_scheduler_kwargs=None,
dataloader_kwargs=None,
+ config_optimizers=None,
**trainer_kwargs,
):
super(FEDformer, self).__init__(
@@ -518,11 +514,8 @@ def __init__(
scaler_type=scaler_type,
drop_last_loader=drop_last_loader,
random_seed=random_seed,
- optimizer=optimizer,
- optimizer_kwargs=optimizer_kwargs,
- lr_scheduler=lr_scheduler,
- lr_scheduler_kwargs=lr_scheduler_kwargs,
dataloader_kwargs=dataloader_kwargs,
+ config_optimizers=config_optimizers,
**trainer_kwargs,
)
# Architecture
diff --git a/neuralforecast/models/gru.py b/neuralforecast/models/gru.py
index f45aa757..f969d2dd 100644
--- a/neuralforecast/models/gru.py
+++ b/neuralforecast/models/gru.py
@@ -51,11 +51,10 @@ class GRU(BaseRecurrent):
`random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
`alias`: str, optional, Custom name of the model.
- `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
- `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
- `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
- `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
`dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
+ `config_optimizers`: Callable, optional, a callable that implements the optimization behavior as detailed in
+ https://lightning.ai/docs/pytorch/stable/api/lightning.pytorch.core.LightningModule.html#lightning.pytorch.core.LightningModule.configure_optimizers
+ Note that the function must accept a single argument, the model instance (a subclass of NeuralForecast's `BaseModel`), which is used to specify the model's `parameters()` for the optimizer.
`**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
"""
@@ -93,11 +92,8 @@ def __init__(
scaler_type: str = "robust",
random_seed=1,
drop_last_loader=False,
- optimizer=None,
- optimizer_kwargs=None,
- lr_scheduler=None,
- lr_scheduler_kwargs=None,
dataloader_kwargs=None,
+ config_optimizers=None,
**trainer_kwargs
):
super(GRU, self).__init__(
@@ -119,11 +115,8 @@ def __init__(
stat_exog_list=stat_exog_list,
drop_last_loader=drop_last_loader,
random_seed=random_seed,
- optimizer=optimizer,
- optimizer_kwargs=optimizer_kwargs,
- lr_scheduler=lr_scheduler,
- lr_scheduler_kwargs=lr_scheduler_kwargs,
dataloader_kwargs=dataloader_kwargs,
+ config_optimizers=config_optimizers,
**trainer_kwargs
)
diff --git a/neuralforecast/models/informer.py b/neuralforecast/models/informer.py
index cb4ff262..2865b7a8 100644
--- a/neuralforecast/models/informer.py
+++ b/neuralforecast/models/informer.py
@@ -225,11 +225,10 @@ class Informer(BaseWindows):
`random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
`alias`: str, optional, Custom name of the model.
- `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
- `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
- `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
- `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
`dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
+ `config_optimizers`: Callable, optional, a callable that implements the optimization behavior as detailed in
+ https://lightning.ai/docs/pytorch/stable/api/lightning.pytorch.core.LightningModule.html#lightning.pytorch.core.LightningModule.configure_optimizers
+ Note that the function must accept a single argument, the model instance (a subclass of NeuralForecast's `BaseModel`), which is used to specify the model's `parameters()` for the optimizer.
`**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
*References*
@@ -276,11 +275,8 @@ def __init__(
scaler_type: str = "identity",
random_seed: int = 1,
drop_last_loader: bool = False,
- optimizer=None,
- optimizer_kwargs=None,
- lr_scheduler=None,
- lr_scheduler_kwargs=None,
dataloader_kwargs=None,
+ config_optimizers=None,
**trainer_kwargs,
):
super(Informer, self).__init__(
@@ -306,11 +302,8 @@ def __init__(
scaler_type=scaler_type,
drop_last_loader=drop_last_loader,
random_seed=random_seed,
- optimizer=optimizer,
- optimizer_kwargs=optimizer_kwargs,
- lr_scheduler=lr_scheduler,
- lr_scheduler_kwargs=lr_scheduler_kwargs,
dataloader_kwargs=dataloader_kwargs,
+ config_optimizers=config_optimizers,
**trainer_kwargs,
)
diff --git a/neuralforecast/models/itransformer.py b/neuralforecast/models/itransformer.py
index 121eac2b..30e33795 100644
--- a/neuralforecast/models/itransformer.py
+++ b/neuralforecast/models/itransformer.py
@@ -133,11 +133,10 @@ class iTransformer(BaseMultivariate):
`random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
`alias`: str, optional, Custom name of the model.
- `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
- `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
- `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
- `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
`dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
+ `config_optimizers`: Callable, optional, a callable that implements the optimization behavior as detailed in
+ https://lightning.ai/docs/pytorch/stable/api/lightning.pytorch.core.LightningModule.html#lightning.pytorch.core.LightningModule.configure_optimizers
+ Note that the function must accept a single argument, the model instance (a subclass of NeuralForecast's `BaseModel`), which is used to specify the model's `parameters()` for the optimizer.
`**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
**References**
@@ -178,11 +177,8 @@ def __init__(
scaler_type: str = "identity",
random_seed: int = 1,
drop_last_loader: bool = False,
- optimizer=None,
- optimizer_kwargs=None,
- lr_scheduler=None,
- lr_scheduler_kwargs=None,
dataloader_kwargs=None,
+ config_optimizers=None,
**trainer_kwargs
):
@@ -205,11 +201,8 @@ def __init__(
scaler_type=scaler_type,
random_seed=random_seed,
drop_last_loader=drop_last_loader,
- optimizer=optimizer,
- optimizer_kwargs=optimizer_kwargs,
- lr_scheduler=lr_scheduler,
- lr_scheduler_kwargs=lr_scheduler_kwargs,
dataloader_kwargs=dataloader_kwargs,
+ config_optimizers=config_optimizers,
**trainer_kwargs
)
diff --git a/neuralforecast/models/kan.py b/neuralforecast/models/kan.py
index e442fdbd..a9e95012 100644
--- a/neuralforecast/models/kan.py
+++ b/neuralforecast/models/kan.py
@@ -283,9 +283,10 @@ class KAN(BaseWindows):
`random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
`alias`: str, optional, Custom name of the model.
- `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
- `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
`dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
+ `config_optimizers`: Callable, optional, a callable that implements the optimization behavior as detailed in
+ https://lightning.ai/docs/pytorch/stable/api/lightning.pytorch.core.LightningModule.html#lightning.pytorch.core.LightningModule.configure_optimizers
+ Note that the function must accept a single argument, the model instance (a subclass of NeuralForecast's `BaseModel`), which is used to specify the model's `parameters()` for the optimizer.
`**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
**References**
@@ -332,9 +333,8 @@ def __init__(
scaler_type: str = "identity",
random_seed: int = 1,
drop_last_loader: bool = False,
- optimizer=None,
- optimizer_kwargs=None,
dataloader_kwargs=None,
+ config_optimizers=None,
**trainer_kwargs
):
@@ -362,9 +362,8 @@ def __init__(
scaler_type=scaler_type,
drop_last_loader=drop_last_loader,
random_seed=random_seed,
- optimizer=optimizer,
- optimizer_kwargs=optimizer_kwargs,
dataloader_kwargs=dataloader_kwargs,
+ config_optimizers=config_optimizers,
**trainer_kwargs
)
diff --git a/neuralforecast/models/lstm.py b/neuralforecast/models/lstm.py
index bb8906b8..eaa67115 100644
--- a/neuralforecast/models/lstm.py
+++ b/neuralforecast/models/lstm.py
@@ -49,11 +49,10 @@ class LSTM(BaseRecurrent):
`random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
`alias`: str, optional, Custom name of the model.
- `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
- `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
- `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
- `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
`dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
+ `config_optimizers`: Callable, optional, a callable that implements the optimization behavior as detailed in
+ https://lightning.ai/docs/pytorch/stable/api/lightning.pytorch.core.LightningModule.html#lightning.pytorch.core.LightningModule.configure_optimizers
+ Note that the function must accept a single argument, the model instance (a subclass of NeuralForecast's `BaseModel`), which is used to specify the model's `parameters()` for the optimizer.
`**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
"""
@@ -90,11 +89,8 @@ def __init__(
scaler_type: str = "robust",
random_seed=1,
drop_last_loader=False,
- optimizer=None,
- optimizer_kwargs=None,
- lr_scheduler=None,
- lr_scheduler_kwargs=None,
dataloader_kwargs=None,
+ config_optimizers=None,
**trainer_kwargs
):
super(LSTM, self).__init__(
@@ -116,11 +112,8 @@ def __init__(
stat_exog_list=stat_exog_list,
drop_last_loader=drop_last_loader,
random_seed=random_seed,
- optimizer=optimizer,
- optimizer_kwargs=optimizer_kwargs,
- lr_scheduler=lr_scheduler,
- lr_scheduler_kwargs=lr_scheduler_kwargs,
dataloader_kwargs=dataloader_kwargs,
+ config_optimizers=config_optimizers,
**trainer_kwargs
)
diff --git a/neuralforecast/models/mlp.py b/neuralforecast/models/mlp.py
index 535c4142..c3f5f452 100644
--- a/neuralforecast/models/mlp.py
+++ b/neuralforecast/models/mlp.py
@@ -48,11 +48,10 @@ class MLP(BaseWindows):
`random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
`alias`: str, optional, Custom name of the model.
- `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
- `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
- `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
- `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
`dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
+ `config_optimizers`: Callable, optional, a callable that implements the optimization behavior as detailed in
+ https://lightning.ai/docs/pytorch/stable/api/lightning.pytorch.core.LightningModule.html#lightning.pytorch.core.LightningModule.configure_optimizers
+ Note that the function must accept a single argument, the model instance (a subclass of NeuralForecast's `BaseModel`), which is used to specify the model's `parameters()` for the optimizer.
`**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
"""
@@ -88,11 +87,8 @@ def __init__(
scaler_type: str = "identity",
random_seed: int = 1,
drop_last_loader: bool = False,
- optimizer=None,
- optimizer_kwargs=None,
- lr_scheduler=None,
- lr_scheduler_kwargs=None,
dataloader_kwargs=None,
+ config_optimizers=None,
**trainer_kwargs
):
@@ -120,11 +116,8 @@ def __init__(
scaler_type=scaler_type,
drop_last_loader=drop_last_loader,
random_seed=random_seed,
- optimizer=optimizer,
- optimizer_kwargs=optimizer_kwargs,
- lr_scheduler=lr_scheduler,
- lr_scheduler_kwargs=lr_scheduler_kwargs,
dataloader_kwargs=dataloader_kwargs,
+ config_optimizers=config_optimizers,
**trainer_kwargs
)
diff --git a/neuralforecast/models/mlpmultivariate.py b/neuralforecast/models/mlpmultivariate.py
index f03ec722..dcb06505 100644
--- a/neuralforecast/models/mlpmultivariate.py
+++ b/neuralforecast/models/mlpmultivariate.py
@@ -42,11 +42,10 @@ class MLPMultivariate(BaseMultivariate):
`random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
`alias`: str, optional, Custom name of the model.
- `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
- `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
- `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
- `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
`dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
+ `config_optimizers`: Callable, optional, a callable that implements the optimization behavior as detailed in
+ https://lightning.ai/docs/pytorch/stable/api/lightning.pytorch.core.LightningModule.html#lightning.pytorch.core.LightningModule.configure_optimizers
+ Note that the function must accept a single argument, the model instance (a subclass of NeuralForecast's `BaseModel`), which is used to specify the model's `parameters()` for the optimizer.
`**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
"""
@@ -78,11 +77,8 @@ def __init__(
scaler_type: str = "identity",
random_seed: int = 1,
drop_last_loader: bool = False,
- optimizer=None,
- optimizer_kwargs=None,
- lr_scheduler=None,
- lr_scheduler_kwargs=None,
dataloader_kwargs=None,
+ config_optimizers=None,
**trainer_kwargs
):
@@ -106,11 +102,8 @@ def __init__(
scaler_type=scaler_type,
drop_last_loader=drop_last_loader,
random_seed=random_seed,
- optimizer=optimizer,
- optimizer_kwargs=optimizer_kwargs,
- lr_scheduler=lr_scheduler,
- lr_scheduler_kwargs=lr_scheduler_kwargs,
dataloader_kwargs=dataloader_kwargs,
+ config_optimizers=config_optimizers,
**trainer_kwargs
)
diff --git a/neuralforecast/models/nbeats.py b/neuralforecast/models/nbeats.py
index 1fb4f07b..cd55307a 100644
--- a/neuralforecast/models/nbeats.py
+++ b/neuralforecast/models/nbeats.py
@@ -227,11 +227,10 @@ class NBEATS(BaseWindows):
`random_seed`: int, random_seed for pytorch initializer and numpy generators.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
`alias`: str, optional, Custom name of the model.
- `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
- `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
- `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
- `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
`dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
+ `config_optimizers`: Callable, optional, a callable that implements the optimization behavior as detailed in
+ https://lightning.ai/docs/pytorch/stable/api/lightning.pytorch.core.LightningModule.html#lightning.pytorch.core.LightningModule.configure_optimizers
+ Note that the function must accept a single argument, the model instance (a subclass of NeuralForecast's `BaseModel`), which is used to specify the model's `parameters()` for the optimizer.
`**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
**References:**
@@ -273,11 +272,8 @@ def __init__(
scaler_type: str = "identity",
random_seed: int = 1,
drop_last_loader: bool = False,
- optimizer=None,
- optimizer_kwargs=None,
- lr_scheduler=None,
- lr_scheduler_kwargs=None,
dataloader_kwargs=None,
+ config_optimizers=None,
**trainer_kwargs,
):
@@ -307,11 +303,8 @@ def __init__(
scaler_type=scaler_type,
drop_last_loader=drop_last_loader,
random_seed=random_seed,
- optimizer=optimizer,
- optimizer_kwargs=optimizer_kwargs,
- lr_scheduler=lr_scheduler,
- lr_scheduler_kwargs=lr_scheduler_kwargs,
dataloader_kwargs=dataloader_kwargs,
+ config_optimizers=config_optimizers,
**trainer_kwargs,
)
diff --git a/neuralforecast/models/nbeatsx.py b/neuralforecast/models/nbeatsx.py
index 10e37f60..b08bc06d 100644
--- a/neuralforecast/models/nbeatsx.py
+++ b/neuralforecast/models/nbeatsx.py
@@ -314,11 +314,10 @@ class NBEATSx(BaseWindows):
`random_seed`: int, random seed initialization for replicability.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
`alias`: str, optional, Custom name of the model.
- `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
- `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
- `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
- `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
`dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
+ `config_optimizers`: Callable, optional, a callable that implements the optimization behavior as detailed in
+ https://lightning.ai/docs/pytorch/stable/api/lightning.pytorch.core.LightningModule.html#lightning.pytorch.core.LightningModule.configure_optimizers
+ Note that the function must accept a single argument, the model instance (a subclass of NeuralForecast's `BaseModel`), which is used to specify the model's `parameters()` for the optimizer.
`**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
**References:**
@@ -364,11 +363,8 @@ def __init__(
scaler_type: str = "identity",
random_seed: int = 1,
drop_last_loader: bool = False,
- optimizer=None,
- optimizer_kwargs=None,
- lr_scheduler=None,
- lr_scheduler_kwargs=None,
dataloader_kwargs=None,
+ config_optimizers=None,
**trainer_kwargs,
):
# Protect horizon collapsed seasonality and trend NBEATSx-i basis
@@ -401,11 +397,8 @@ def __init__(
scaler_type=scaler_type,
drop_last_loader=drop_last_loader,
random_seed=random_seed,
- optimizer=optimizer,
- optimizer_kwargs=optimizer_kwargs,
- lr_scheduler=lr_scheduler,
- lr_scheduler_kwargs=lr_scheduler_kwargs,
dataloader_kwargs=dataloader_kwargs,
+ config_optimizers=config_optimizers,
**trainer_kwargs,
)
diff --git a/neuralforecast/models/nhits.py b/neuralforecast/models/nhits.py
index f16db81a..d28936d5 100644
--- a/neuralforecast/models/nhits.py
+++ b/neuralforecast/models/nhits.py
@@ -225,11 +225,10 @@ class NHITS(BaseWindows):
`random_seed`: int, random_seed for pytorch initializer and numpy generators.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
`alias`: str, optional, Custom name of the model.
- `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
- `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
- `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
- `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
`dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
+ `config_optimizers`: Callable, optional, a callable that implements the optimization behavior as detailed in
+ https://lightning.ai/docs/pytorch/stable/api/lightning.pytorch.core.LightningModule.html#lightning.pytorch.core.LightningModule.configure_optimizers
+ Note that the function must accept a single argument, the model instance (a subclass of NeuralForecast's `BaseModel`), which is used to specify the model's `parameters()` for the optimizer.
`**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
**References:**
@@ -277,11 +276,8 @@ def __init__(
scaler_type: str = "identity",
random_seed: int = 1,
drop_last_loader=False,
- optimizer=None,
- optimizer_kwargs=None,
- lr_scheduler=None,
- lr_scheduler_kwargs=None,
dataloader_kwargs=None,
+ config_optimizers=None,
**trainer_kwargs,
):
@@ -309,11 +305,8 @@ def __init__(
scaler_type=scaler_type,
drop_last_loader=drop_last_loader,
random_seed=random_seed,
- optimizer=optimizer,
- optimizer_kwargs=optimizer_kwargs,
- lr_scheduler=lr_scheduler,
- lr_scheduler_kwargs=lr_scheduler_kwargs,
dataloader_kwargs=dataloader_kwargs,
+ config_optimizers=config_optimizers,
**trainer_kwargs,
)
diff --git a/neuralforecast/models/nlinear.py b/neuralforecast/models/nlinear.py
index 4bad929b..bf633fe0 100644
--- a/neuralforecast/models/nlinear.py
+++ b/neuralforecast/models/nlinear.py
@@ -38,11 +38,10 @@ class NLinear(BaseWindows):
`random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
`alias`: str, optional, Custom name of the model.
- `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
- `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
- `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
- `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
`dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
+ `config_optimizers`: Callable, optional, a callable that implements the optimization behavior as detailed in
+ https://lightning.ai/docs/pytorch/stable/api/lightning.pytorch.core.LightningModule.html#lightning.pytorch.core.LightningModule.configure_optimizers
+ Note that the function must accept a single argument, the model instance (a subclass of NeuralForecast's `BaseModel`), which is used to specify the model's `parameters()` for the optimizer.
`**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
*References*
@@ -79,11 +78,8 @@ def __init__(
scaler_type: str = "identity",
random_seed: int = 1,
drop_last_loader: bool = False,
- optimizer=None,
- optimizer_kwargs=None,
- lr_scheduler=None,
- lr_scheduler_kwargs=None,
dataloader_kwargs=None,
+ config_optimizers=None,
**trainer_kwargs
):
super(NLinear, self).__init__(
@@ -109,11 +105,8 @@ def __init__(
scaler_type=scaler_type,
drop_last_loader=drop_last_loader,
random_seed=random_seed,
- optimizer=optimizer,
- optimizer_kwargs=optimizer_kwargs,
- lr_scheduler=lr_scheduler,
- lr_scheduler_kwargs=lr_scheduler_kwargs,
dataloader_kwargs=dataloader_kwargs,
+ config_optimizers=config_optimizers,
**trainer_kwargs
)
diff --git a/neuralforecast/models/patchtst.py b/neuralforecast/models/patchtst.py
index 25770b71..95a5d7e7 100644
--- a/neuralforecast/models/patchtst.py
+++ b/neuralforecast/models/patchtst.py
@@ -835,11 +835,10 @@ class PatchTST(BaseWindows):
`random_seed`: int, random_seed for pytorch initializer and numpy generators.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
`alias`: str, optional, Custom name of the model.
- `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
- `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
- `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
- `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
`dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
+ `config_optimizers`: Callable, optional, a callable that implements the optimization behavior as detailed in
+ https://lightning.ai/docs/pytorch/stable/api/lightning.pytorch.core.LightningModule.html#lightning.pytorch.core.LightningModule.configure_optimizers
+ Note that the function must accept a single argument, the model instance (a subclass of NeuralForecast's `BaseModel`), which is used to specify the model's `parameters()` for the optimizer.
`**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
**References:**
@@ -893,11 +892,8 @@ def __init__(
scaler_type: str = "identity",
random_seed: int = 1,
drop_last_loader: bool = False,
- optimizer=None,
- optimizer_kwargs=None,
- lr_scheduler=None,
- lr_scheduler_kwargs=None,
dataloader_kwargs=None,
+ config_optimizers=None,
**trainer_kwargs
):
super(PatchTST, self).__init__(
@@ -923,11 +919,8 @@ def __init__(
scaler_type=scaler_type,
drop_last_loader=drop_last_loader,
random_seed=random_seed,
- optimizer=optimizer,
- optimizer_kwargs=optimizer_kwargs,
- lr_scheduler=lr_scheduler,
- lr_scheduler_kwargs=lr_scheduler_kwargs,
dataloader_kwargs=dataloader_kwargs,
+ config_optimizers=config_optimizers,
**trainer_kwargs
)
diff --git a/neuralforecast/models/rmok.py b/neuralforecast/models/rmok.py
index fc66483d..97e179d0 100644
--- a/neuralforecast/models/rmok.py
+++ b/neuralforecast/models/rmok.py
@@ -283,11 +283,10 @@ class RMoK(BaseMultivariate):
`random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
`alias`: str, optional, Custom name of the model.
- `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
- `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
- `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
- `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
`dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
+ `config_optimizers`: Callable, optional, a callable that implements the optimization behavior as detailed in
+ https://lightning.ai/docs/pytorch/stable/api/lightning.pytorch.core.LightningModule.html#lightning.pytorch.core.LightningModule.configure_optimizers
+ Note that the function must accept a single argument, the model instance (a subclass of NeuralForecast's `BaseModel`), which is used to specify the model's `parameters()` for the optimizer.
`**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
Reference
@@ -325,11 +324,8 @@ def __init__(
scaler_type: str = "identity",
random_seed: int = 1,
drop_last_loader: bool = False,
- optimizer=None,
- optimizer_kwargs=None,
- lr_scheduler=None,
- lr_scheduler_kwargs=None,
dataloader_kwargs=None,
+ config_optimizers=None,
**trainer_kwargs
):
@@ -352,11 +348,8 @@ def __init__(
scaler_type=scaler_type,
random_seed=random_seed,
drop_last_loader=drop_last_loader,
- optimizer=optimizer,
- optimizer_kwargs=optimizer_kwargs,
- lr_scheduler=lr_scheduler,
- lr_scheduler_kwargs=lr_scheduler_kwargs,
dataloader_kwargs=dataloader_kwargs,
+ config_optimizers=config_optimizers,
**trainer_kwargs
)
diff --git a/neuralforecast/models/rnn.py b/neuralforecast/models/rnn.py
index d3f8b4ff..8b486512 100644
--- a/neuralforecast/models/rnn.py
+++ b/neuralforecast/models/rnn.py
@@ -49,13 +49,11 @@ class RNN(BaseRecurrent):
`scaler_type`: str='robust', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
`random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
- `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
- `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
- `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
- `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
`dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
`alias`: str, optional, Custom name of the model.
-
+ `config_optimizers`: Callable, optional, a callable that implements the optimization behavior as detailed in
+ https://lightning.ai/docs/pytorch/stable/api/lightning.pytorch.core.LightningModule.html#lightning.pytorch.core.LightningModule.configure_optimizers
+ Note that the function must accept a single argument, the model instance (a subclass of NeuralForecast's `BaseModel`), which is used to specify the model's `parameters()` for the optimizer.
`**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
"""
@@ -93,11 +91,8 @@ def __init__(
scaler_type: str = "robust",
random_seed=1,
drop_last_loader=False,
- optimizer=None,
- optimizer_kwargs=None,
- lr_scheduler=None,
- lr_scheduler_kwargs=None,
dataloader_kwargs=None,
+ config_optimizers=None,
**trainer_kwargs
):
super(RNN, self).__init__(
@@ -119,11 +114,8 @@ def __init__(
stat_exog_list=stat_exog_list,
drop_last_loader=drop_last_loader,
random_seed=random_seed,
- optimizer=optimizer,
- optimizer_kwargs=optimizer_kwargs,
- lr_scheduler=lr_scheduler,
- lr_scheduler_kwargs=lr_scheduler_kwargs,
dataloader_kwargs=dataloader_kwargs,
+ config_optimizers=config_optimizers,
**trainer_kwargs
)
diff --git a/neuralforecast/models/softs.py b/neuralforecast/models/softs.py
index a40f32be..1a521974 100644
--- a/neuralforecast/models/softs.py
+++ b/neuralforecast/models/softs.py
@@ -108,11 +108,10 @@ class SOFTS(BaseMultivariate):
`random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
`alias`: str, optional, Custom name of the model.
- `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
- `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
- `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
- `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
`dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
+ `config_optimizers`: Callable, optional, a callable that implements the optimization behavior as detailed in
+ https://lightning.ai/docs/pytorch/stable/api/lightning.pytorch.core.LightningModule.html#lightning.pytorch.core.LightningModule.configure_optimizers
+ Note that the function must accept a single argument, the model instance (a subclass of NeuralForecast's `BaseModel`), which is used to specify the model's `parameters()` for the optimizer.
`**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
**References**
@@ -151,11 +150,8 @@ def __init__(
scaler_type: str = "identity",
random_seed: int = 1,
drop_last_loader: bool = False,
- optimizer=None,
- optimizer_kwargs=None,
- lr_scheduler=None,
- lr_scheduler_kwargs=None,
dataloader_kwargs=None,
+ config_optimizers=None,
**trainer_kwargs
):
@@ -178,11 +174,8 @@ def __init__(
scaler_type=scaler_type,
random_seed=random_seed,
drop_last_loader=drop_last_loader,
- optimizer=optimizer,
- optimizer_kwargs=optimizer_kwargs,
- lr_scheduler=lr_scheduler,
- lr_scheduler_kwargs=lr_scheduler_kwargs,
dataloader_kwargs=dataloader_kwargs,
+ config_optimizers=config_optimizers,
**trainer_kwargs
)
diff --git a/neuralforecast/models/stemgnn.py b/neuralforecast/models/stemgnn.py
index 69cdc4ef..f3513e8f 100644
--- a/neuralforecast/models/stemgnn.py
+++ b/neuralforecast/models/stemgnn.py
@@ -168,11 +168,10 @@ class StemGNN(BaseMultivariate):
`random_seed`: int, random_seed for pytorch initializer and numpy generators.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
`alias`: str, optional, Custom name of the model.
- `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
- `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
- `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
- `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
`dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
+ `config_optimizers`: Callable, optional, a callable that implements the optimization behavior as detailed in
+ https://lightning.ai/docs/pytorch/stable/api/lightning.pytorch.core.LightningModule.html#lightning.pytorch.core.LightningModule.configure_optimizers
+ Note that the function must accept a single argument, the model instance (a subclass of NeuralForecast's `BaseModel`), which is used to specify the model's `parameters()` for the optimizer.
`**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
"""
@@ -206,11 +205,8 @@ def __init__(
scaler_type: str = "robust",
random_seed: int = 1,
drop_last_loader=False,
- optimizer=None,
- optimizer_kwargs=None,
- lr_scheduler=None,
- lr_scheduler_kwargs=None,
dataloader_kwargs=None,
+ config_optimizers=None,
**trainer_kwargs
):
@@ -234,11 +230,8 @@ def __init__(
scaler_type=scaler_type,
drop_last_loader=drop_last_loader,
random_seed=random_seed,
- optimizer=optimizer,
- optimizer_kwargs=optimizer_kwargs,
- lr_scheduler=lr_scheduler,
- lr_scheduler_kwargs=lr_scheduler_kwargs,
dataloader_kwargs=dataloader_kwargs,
+ config_optimizers=config_optimizers,
**trainer_kwargs
)
# Quick fix for now, fix the model later.
diff --git a/neuralforecast/models/tcn.py b/neuralforecast/models/tcn.py
index 70dd9c37..a63f1d38 100644
--- a/neuralforecast/models/tcn.py
+++ b/neuralforecast/models/tcn.py
@@ -46,11 +46,10 @@ class TCN(BaseRecurrent):
`random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
`alias`: str, optional, Custom name of the model.
- `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
- `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
- `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
- `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
`dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
+ `config_optimizers`: Callable, optional, a callable that implements the optimization behavior as detailed in
+ https://lightning.ai/docs/pytorch/stable/api/lightning.pytorch.core.LightningModule.html#lightning.pytorch.core.LightningModule.configure_optimizers
+ Note that the function must accept a single argument, the model instance (a subclass of NeuralForecast's `BaseModel`), which is used to specify the model's `parameters()` for the optimizer.
`**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
"""
@@ -87,11 +86,8 @@ def __init__(
scaler_type: str = "robust",
random_seed: int = 1,
drop_last_loader=False,
- optimizer=None,
- optimizer_kwargs=None,
- lr_scheduler=None,
- lr_scheduler_kwargs=None,
dataloader_kwargs=None,
+ config_optimizers=None,
**trainer_kwargs
):
super(TCN, self).__init__(
@@ -113,11 +109,8 @@ def __init__(
stat_exog_list=stat_exog_list,
drop_last_loader=drop_last_loader,
random_seed=random_seed,
- optimizer=optimizer,
- optimizer_kwargs=optimizer_kwargs,
- lr_scheduler=lr_scheduler,
- lr_scheduler_kwargs=lr_scheduler_kwargs,
dataloader_kwargs=dataloader_kwargs,
+ config_optimizers=config_optimizers,
**trainer_kwargs
)
diff --git a/neuralforecast/models/tft.py b/neuralforecast/models/tft.py
index 616496fc..ba27b59e 100644
--- a/neuralforecast/models/tft.py
+++ b/neuralforecast/models/tft.py
@@ -456,11 +456,10 @@ class TFT(BaseWindows):
`random_seed`: int, random seed initialization for replicability.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
`alias`: str, optional, Custom name of the model.
- `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
- `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
- `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
- `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
`dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
+ `config_optimizers`: Callable, optional, a callable that implements the optimization behavior as detailed in
+ https://lightning.ai/docs/pytorch/stable/api/lightning.pytorch.core.LightningModule.html#lightning.pytorch.core.LightningModule.configure_optimizers
+ Note that the function must accept a single argument, the model instance (a subclass of NeuralForecast's `BaseModel`), which is used to specify the model's `parameters()` for the optimizer.
`**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
**References:**
@@ -503,11 +502,8 @@ def __init__(
scaler_type: str = "robust",
drop_last_loader=False,
random_seed: int = 1,
- optimizer=None,
- optimizer_kwargs=None,
- lr_scheduler=None,
- lr_scheduler_kwargs=None,
dataloader_kwargs=None,
+ config_optimizers=None,
**trainer_kwargs,
):
@@ -534,11 +530,8 @@ def __init__(
scaler_type=scaler_type,
drop_last_loader=drop_last_loader,
random_seed=random_seed,
- optimizer=optimizer,
- optimizer_kwargs=optimizer_kwargs,
- lr_scheduler=lr_scheduler,
- lr_scheduler_kwargs=lr_scheduler_kwargs,
dataloader_kwargs=dataloader_kwargs,
+ config_optimizers=config_optimizers,
**trainer_kwargs,
)
self.example_length = input_size + h
diff --git a/neuralforecast/models/tide.py b/neuralforecast/models/tide.py
index 1f8f7144..7a8aadaa 100644
--- a/neuralforecast/models/tide.py
+++ b/neuralforecast/models/tide.py
@@ -80,11 +80,10 @@ class TiDE(BaseWindows):
`random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
`alias`: str, optional, Custom name of the model.
- `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
- `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
- `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
- `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
`dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
+ `config_optimizers`: Callable, optional, a callable that implements the optimization behavior as detailed in
+ https://lightning.ai/docs/pytorch/stable/api/lightning.pytorch.core.LightningModule.html#lightning.pytorch.core.LightningModule.configure_optimizers
+ Note that the function must accept a single argument, the model instance (a subclass of NeuralForecast's `BaseModel`), which is used to specify the model's `parameters()` for the optimizer.
`**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
**References:**
@@ -130,11 +129,8 @@ def __init__(
scaler_type: str = "identity",
random_seed: int = 1,
drop_last_loader: bool = False,
- optimizer=None,
- optimizer_kwargs=None,
- lr_scheduler=None,
- lr_scheduler_kwargs=None,
dataloader_kwargs=None,
+ config_optimizers=None,
**trainer_kwargs
):
@@ -162,11 +158,8 @@ def __init__(
scaler_type=scaler_type,
random_seed=random_seed,
drop_last_loader=drop_last_loader,
- optimizer=optimizer,
- optimizer_kwargs=optimizer_kwargs,
- lr_scheduler=lr_scheduler,
- lr_scheduler_kwargs=lr_scheduler_kwargs,
dataloader_kwargs=dataloader_kwargs,
+ config_optimizers=config_optimizers,
**trainer_kwargs
)
self.h = h
diff --git a/neuralforecast/models/timellm.py b/neuralforecast/models/timellm.py
index bec5fb45..dbf0869b 100644
--- a/neuralforecast/models/timellm.py
+++ b/neuralforecast/models/timellm.py
@@ -213,11 +213,10 @@ class TimeLLM(BaseWindows):
`random_seed`: int, random_seed for pytorch initializer and numpy generators.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
`alias`: str, optional, Custom name of the model.
- `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
- `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
- `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
- `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
`dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
+ `config_optimizers`: Callable, optional, a callable that implements the optimization behavior as detailed in
+ https://lightning.ai/docs/pytorch/stable/api/lightning.pytorch.core.LightningModule.html#lightning.pytorch.core.LightningModule.configure_optimizers
+ Note that the callable must accept a single argument, an instance of a subclass of NeuralForecast's `BaseModel`, so that the model's parameters() can be passed to the optimizer.
`**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
**References:**
@@ -270,11 +269,8 @@ def __init__(
scaler_type: str = "identity",
drop_last_loader: bool = False,
random_seed: int = 1,
- optimizer=None,
- optimizer_kwargs=None,
- lr_scheduler=None,
- lr_scheduler_kwargs=None,
dataloader_kwargs=None,
+ config_optimizers=None,
**trainer_kwargs,
):
super(TimeLLM, self).__init__(
@@ -299,11 +295,8 @@ def __init__(
scaler_type=scaler_type,
drop_last_loader=drop_last_loader,
random_seed=random_seed,
- optimizer=optimizer,
- optimizer_kwargs=optimizer_kwargs,
- lr_scheduler=lr_scheduler,
- lr_scheduler_kwargs=lr_scheduler_kwargs,
dataloader_kwargs=dataloader_kwargs,
+ config_optimizers=config_optimizers,
**trainer_kwargs,
)
diff --git a/neuralforecast/models/timemixer.py b/neuralforecast/models/timemixer.py
index cdaea20b..da8af5ab 100644
--- a/neuralforecast/models/timemixer.py
+++ b/neuralforecast/models/timemixer.py
@@ -284,11 +284,10 @@ class TimeMixer(BaseMultivariate):
`random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
`alias`: str, optional, Custom name of the model.
- `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
- `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
- `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
- `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
`dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
+ `config_optimizers`: Callable, optional, a callable that implements the optimization behavior as detailed in
+ https://lightning.ai/docs/pytorch/stable/api/lightning.pytorch.core.LightningModule.html#lightning.pytorch.core.LightningModule.configure_optimizers
+ Note that the callable must accept a single argument, an instance of a subclass of NeuralForecast's `BaseModel`, so that the model's parameters() can be passed to the optimizer.
`**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
**References**
@@ -334,11 +333,8 @@ def __init__(
scaler_type: str = "identity",
random_seed: int = 1,
drop_last_loader: bool = False,
- optimizer=None,
- optimizer_kwargs=None,
- lr_scheduler=None,
- lr_scheduler_kwargs=None,
dataloader_kwargs=None,
+ config_optimizers=None,
**trainer_kwargs,
):
@@ -361,11 +357,8 @@ def __init__(
scaler_type=scaler_type,
random_seed=random_seed,
drop_last_loader=drop_last_loader,
- optimizer=optimizer,
- optimizer_kwargs=optimizer_kwargs,
- lr_scheduler=lr_scheduler,
- lr_scheduler_kwargs=lr_scheduler_kwargs,
dataloader_kwargs=dataloader_kwargs,
+ config_optimizers=config_optimizers,
**trainer_kwargs,
)
diff --git a/neuralforecast/models/timesnet.py b/neuralforecast/models/timesnet.py
index 9089a879..031fd699 100644
--- a/neuralforecast/models/timesnet.py
+++ b/neuralforecast/models/timesnet.py
@@ -180,14 +180,11 @@ class TimesNet(BaseWindows):
Random_seed for pytorch initializer and numpy generators.
drop_last_loader : bool (default=False)
If True `TimeSeriesDataLoader` drops last non-full batch.
- `optimizer`: Subclass of 'torch.optim.Optimizer', optional (default=None)
- User specified optimizer instead of the default choice (Adam).
- `optimizer_kwargs`: dict, optional (defualt=None)
- List of parameters used by the user specified `optimizer`.
- `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
- `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
`dataloader_kwargs`: dict, optional (default=None)
List of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
+ `config_optimizers`: Callable, optional, a callable that implements the optimization behavior as detailed in
+ https://lightning.ai/docs/pytorch/stable/api/lightning.pytorch.core.LightningModule.html#lightning.pytorch.core.LightningModule.configure_optimizers
+ Note that the callable must accept a single argument, an instance of a subclass of NeuralForecast's `BaseModel`, so that the model's parameters() can be passed to the optimizer.
**trainer_kwargs
Keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer)
@@ -232,11 +229,8 @@ def __init__(
scaler_type: str = "standard",
random_seed: int = 1,
drop_last_loader: bool = False,
- optimizer=None,
- optimizer_kwargs=None,
- lr_scheduler=None,
- lr_scheduler_kwargs=None,
dataloader_kwargs=None,
+ config_optimizers=None,
**trainer_kwargs
):
super(TimesNet, self).__init__(
@@ -262,11 +256,8 @@ def __init__(
scaler_type=scaler_type,
drop_last_loader=drop_last_loader,
random_seed=random_seed,
- optimizer=optimizer,
- optimizer_kwargs=optimizer_kwargs,
- lr_scheduler=lr_scheduler,
- lr_scheduler_kwargs=lr_scheduler_kwargs,
dataloader_kwargs=dataloader_kwargs,
+ config_optimizers=config_optimizers,
**trainer_kwargs
)
diff --git a/neuralforecast/models/tsmixer.py b/neuralforecast/models/tsmixer.py
index 17fae38b..35542615 100644
--- a/neuralforecast/models/tsmixer.py
+++ b/neuralforecast/models/tsmixer.py
@@ -159,11 +159,10 @@ class TSMixer(BaseMultivariate):
`random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
`alias`: str, optional, Custom name of the model.
- `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
- `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
- `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
- `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
`dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
+ `config_optimizers`: Callable, optional, a callable that implements the optimization behavior as detailed in
+ https://lightning.ai/docs/pytorch/stable/api/lightning.pytorch.core.LightningModule.html#lightning.pytorch.core.LightningModule.configure_optimizers
+ Note that the callable must accept a single argument, an instance of a subclass of NeuralForecast's `BaseModel`, so that the model's parameters() can be passed to the optimizer.
`**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
**References:**
@@ -201,11 +200,8 @@ def __init__(
scaler_type: str = "identity",
random_seed: int = 1,
drop_last_loader: bool = False,
- optimizer=None,
- optimizer_kwargs=None,
- lr_scheduler=None,
- lr_scheduler_kwargs=None,
dataloader_kwargs=None,
+ config_optimizers=None,
**trainer_kwargs
):
@@ -229,11 +225,8 @@ def __init__(
scaler_type=scaler_type,
random_seed=random_seed,
drop_last_loader=drop_last_loader,
- optimizer=optimizer,
- optimizer_kwargs=optimizer_kwargs,
- lr_scheduler=lr_scheduler,
- lr_scheduler_kwargs=lr_scheduler_kwargs,
dataloader_kwargs=dataloader_kwargs,
+ config_optimizers=config_optimizers,
**trainer_kwargs
)
diff --git a/neuralforecast/models/tsmixerx.py b/neuralforecast/models/tsmixerx.py
index 97747bbb..ef025fd6 100644
--- a/neuralforecast/models/tsmixerx.py
+++ b/neuralforecast/models/tsmixerx.py
@@ -187,11 +187,10 @@ class TSMixerx(BaseMultivariate):
`random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
`alias`: str, optional, Custom name of the model.
- `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
- `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
- `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
- `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
`dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
+ `config_optimizers`: Callable, optional, a callable that implements the optimization behavior as detailed in
+ https://lightning.ai/docs/pytorch/stable/api/lightning.pytorch.core.LightningModule.html#lightning.pytorch.core.LightningModule.configure_optimizers
+ Note that the callable must accept a single argument, an instance of a subclass of NeuralForecast's `BaseModel`, so that the model's parameters() can be passed to the optimizer.
`**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
**References:**
@@ -229,11 +228,8 @@ def __init__(
scaler_type: str = "identity",
random_seed: int = 1,
drop_last_loader: bool = False,
- optimizer=None,
- optimizer_kwargs=None,
- lr_scheduler=None,
- lr_scheduler_kwargs=None,
dataloader_kwargs=None,
+ config_optimizers=None,
**trainer_kwargs
):
@@ -257,11 +253,8 @@ def __init__(
scaler_type=scaler_type,
random_seed=random_seed,
drop_last_loader=drop_last_loader,
- optimizer=optimizer,
- optimizer_kwargs=optimizer_kwargs,
- lr_scheduler=lr_scheduler,
- lr_scheduler_kwargs=lr_scheduler_kwargs,
dataloader_kwargs=dataloader_kwargs,
+ config_optimizers=config_optimizers,
**trainer_kwargs
)
# Reversible InstanceNormalization layer
diff --git a/neuralforecast/models/vanillatransformer.py b/neuralforecast/models/vanillatransformer.py
index e38c03fc..86177a4f 100644
--- a/neuralforecast/models/vanillatransformer.py
+++ b/neuralforecast/models/vanillatransformer.py
@@ -116,11 +116,10 @@ class VanillaTransformer(BaseWindows):
`random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
`alias`: str, optional, Custom name of the model.
- `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
- `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
- `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
- `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
`dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
+ `config_optimizers`: Callable, optional, a callable that implements the optimization behavior as detailed in
+ https://lightning.ai/docs/pytorch/stable/api/lightning.pytorch.core.LightningModule.html#lightning.pytorch.core.LightningModule.configure_optimizers
+ Note that the callable must accept a single argument, an instance of a subclass of NeuralForecast's `BaseModel`, so that the model's parameters() can be passed to the optimizer.
`**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
*References*
@@ -164,11 +163,8 @@ def __init__(
scaler_type: str = "identity",
random_seed: int = 1,
drop_last_loader: bool = False,
- optimizer=None,
- optimizer_kwargs=None,
- lr_scheduler=None,
- lr_scheduler_kwargs=None,
dataloader_kwargs=None,
+ config_optimizers=None,
**trainer_kwargs,
):
super(VanillaTransformer, self).__init__(
@@ -193,11 +189,8 @@ def __init__(
scaler_type=scaler_type,
drop_last_loader=drop_last_loader,
random_seed=random_seed,
- optimizer=optimizer,
- optimizer_kwargs=optimizer_kwargs,
- lr_scheduler=lr_scheduler,
- lr_scheduler_kwargs=lr_scheduler_kwargs,
dataloader_kwargs=dataloader_kwargs,
+ config_optimizers=config_optimizers,
**trainer_kwargs,
)
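Finally, a hedged end-to-end sketch (again, not part of this patch) of how one of the models touched by this diff could consume such a callable. The choice of `TiDE`, the horizon and input sizes, `freq="M"`, and `train_df` are illustrative assumptions rather than values taken from the patch.

```python
import torch
from neuralforecast import NeuralForecast
from neuralforecast.models import TiDE


def adamw_only(model):
    # Returning a bare optimizer is also valid under Lightning's convention.
    return torch.optim.AdamW(model.parameters(), lr=1e-3)


model = TiDE(h=12, input_size=24, max_steps=100, config_optimizers=adamw_only)
nf = NeuralForecast(models=[model], freq="M")
# nf.fit(df=train_df)  # train_df: long-format dataframe with unique_id, ds, y columns
```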