From 708b360a6cff8391850fc5b7cfb3cb08e686e011 Mon Sep 17 00:00:00 2001 From: jasminerienecker Date: Tue, 22 Oct 2024 09:28:15 +1100 Subject: [PATCH 01/11] enable setting gpu optimization variables --- nbs/common.base_model.ipynb | 2 + nbs/common.base_multivariate.ipynb | 4 ++ nbs/common.base_recurrent.ipynb | 6 +++ nbs/common.base_windows.ipynb | 4 ++ nbs/models.autoformer.ipynb | 6 +++ nbs/models.bitcn.ipynb | 6 +++ nbs/models.deepar.ipynb | 6 +++ nbs/models.deepnpts.ipynb | 6 +++ nbs/models.dilated_rnn.ipynb | 6 +++ nbs/models.dlinear.ipynb | 6 +++ nbs/models.fedformer.ipynb | 6 +++ nbs/models.gru.ipynb | 6 +++ nbs/models.informer.ipynb | 6 +++ nbs/models.itransformer.ipynb | 9 ++++ nbs/models.kan.ipynb | 6 +++ nbs/models.lstm.ipynb | 6 +++ nbs/models.mlp.ipynb | 6 +++ nbs/models.mlpmultivariate.ipynb | 9 ++++ nbs/models.nbeats.ipynb | 6 +++ nbs/models.nbeatsx.ipynb | 6 +++ nbs/models.nhits.ipynb | 6 +++ nbs/models.nlinear.ipynb | 6 +++ nbs/models.patchtst.ipynb | 6 +++ nbs/models.rmok.ipynb | 9 ++++ nbs/models.rnn.ipynb | 6 +++ nbs/models.softs.ipynb | 9 ++++ nbs/models.stemgnn.ipynb | 37 +++++++++++++- nbs/models.tcn.ipynb | 6 +++ nbs/models.tft.ipynb | 6 +++ nbs/models.tide.ipynb | 6 +++ nbs/models.timellm.ipynb | 6 +++ nbs/models.timemixer.ipynb | 9 ++++ nbs/models.timesnet.ipynb | 8 +++ nbs/models.tsmixer.ipynb | 9 ++++ nbs/models.tsmixerx.ipynb | 9 ++++ nbs/models.vanillatransformer.ipynb | 6 +++ nbs/tsdataset.ipynb | 16 ++++-- neuralforecast/_modidx.py | 55 +-------------------- neuralforecast/auto.py | 2 + neuralforecast/common/_base_auto.py | 2 + neuralforecast/common/_base_model.py | 2 + neuralforecast/common/_base_multivariate.py | 6 +++ neuralforecast/common/_base_recurrent.py | 6 +++ neuralforecast/common/_base_windows.py | 6 +++ neuralforecast/common/_scalers.py | 2 + neuralforecast/core.py | 2 + neuralforecast/losses/numpy.py | 2 + neuralforecast/losses/pytorch.py | 2 + neuralforecast/models/autoformer.py | 6 +++ neuralforecast/models/bitcn.py | 6 
+++ neuralforecast/models/deepar.py | 6 +++ neuralforecast/models/deepnpts.py | 6 +++ neuralforecast/models/dilated_rnn.py | 6 +++ neuralforecast/models/dlinear.py | 6 +++ neuralforecast/models/fedformer.py | 6 +++ neuralforecast/models/gru.py | 6 +++ neuralforecast/models/informer.py | 6 +++ neuralforecast/models/itransformer.py | 9 ++++ neuralforecast/models/kan.py | 6 +++ neuralforecast/models/lstm.py | 6 +++ neuralforecast/models/mlp.py | 8 +++ neuralforecast/models/mlpmultivariate.py | 11 +++++ neuralforecast/models/nbeats.py | 6 +++ neuralforecast/models/nbeatsx.py | 6 +++ neuralforecast/models/nhits.py | 6 +++ neuralforecast/models/nlinear.py | 6 +++ neuralforecast/models/patchtst.py | 6 +++ neuralforecast/models/rmok.py | 9 ++++ neuralforecast/models/rnn.py | 6 +++ neuralforecast/models/softs.py | 9 ++++ neuralforecast/models/stemgnn.py | 9 ++++ neuralforecast/models/tcn.py | 6 +++ neuralforecast/models/tft.py | 6 +++ neuralforecast/models/tide.py | 8 +++ neuralforecast/models/timellm.py | 6 +++ neuralforecast/models/timemixer.py | 9 ++++ neuralforecast/models/timesnet.py | 8 +++ neuralforecast/models/tsmixer.py | 11 +++++ neuralforecast/models/tsmixerx.py | 11 +++++ neuralforecast/models/vanillatransformer.py | 6 +++ neuralforecast/tsdataset.py | 12 +++++ neuralforecast/utils.py | 2 + 82 files changed, 548 insertions(+), 59 deletions(-) diff --git a/nbs/common.base_model.ipynb b/nbs/common.base_model.ipynb index b408f6d19..df4dba6bb 100644 --- a/nbs/common.base_model.ipynb +++ b/nbs/common.base_model.ipynb @@ -363,8 +363,10 @@ " batch_size=batch_size,\n", " valid_batch_size=valid_batch_size,\n", " num_workers=self.num_workers_loader,\n", + " prefetch_factor=self.prefetch_factor,\n", " drop_last=self.drop_last_loader,\n", " shuffle_train=shuffle_train,\n", + " pin_memory=self.pin_memory,\n", " )\n", "\n", " if self.val_check_steps > self.max_steps:\n", diff --git a/nbs/common.base_multivariate.ipynb b/nbs/common.base_multivariate.ipynb index 
81c933527..e2dab29e6 100644 --- a/nbs/common.base_multivariate.ipynb +++ b/nbs/common.base_multivariate.ipynb @@ -102,7 +102,9 @@ " hist_exog_list=None,\n", " stat_exog_list=None,\n", " num_workers_loader=0,\n", + " prefetch_factor=None,\n", " drop_last_loader=False,\n", + " pin_memory=False,\n", " random_seed=1, \n", " alias=None,\n", " optimizer=None,\n", @@ -173,7 +175,9 @@ "\n", " # DataModule arguments\n", " self.num_workers_loader = num_workers_loader\n", + " self.prefetch_factor=prefetch_factor\n", " self.drop_last_loader = drop_last_loader\n", + " self.pin_memory = pin_memory\n", " # used by on_validation_epoch_end hook\n", " self.validation_step_outputs = []\n", " self.alias = alias\n", diff --git a/nbs/common.base_recurrent.ipynb b/nbs/common.base_recurrent.ipynb index 0311141c6..0297b1436 100644 --- a/nbs/common.base_recurrent.ipynb +++ b/nbs/common.base_recurrent.ipynb @@ -108,7 +108,9 @@ " hist_exog_list=None,\n", " stat_exog_list=None,\n", " num_workers_loader=0,\n", + " prefetch_factor=None,\n", " drop_last_loader=False,\n", + " pin_memory=False,\n", " random_seed=1, \n", " alias=None,\n", " optimizer=None,\n", @@ -172,7 +174,9 @@ "\n", " # DataModule arguments\n", " self.num_workers_loader = num_workers_loader\n", + " self.prefetch_factor = prefetch_factor\n", " self.drop_last_loader = drop_last_loader\n", + " self.pin_memory = pin_memory\n", " # used by on_validation_epoch_end hook\n", " self.validation_step_outputs = []\n", " self.alias = alias\n", @@ -552,6 +556,8 @@ " dataset=dataset,\n", " valid_batch_size=self.valid_batch_size,\n", " num_workers=self.num_workers_loader,\n", + " prefetch_factor = self.prefetch_factor,\n", + " pin_memory=self.pin_memory,\n", " **data_module_kwargs\n", " )\n", " fcsts = trainer.predict(self, datamodule=datamodule)\n", diff --git a/nbs/common.base_windows.ipynb b/nbs/common.base_windows.ipynb index e48ea4123..088d071c0 100644 --- a/nbs/common.base_windows.ipynb +++ b/nbs/common.base_windows.ipynb @@ -112,7 +112,9 
@@ " stat_exog_list=None,\n", " exclude_insample_y=False,\n", " num_workers_loader=0,\n", + " prefetch_factor=None,\n", " drop_last_loader=False,\n", + " pin_memory=False,\n", " random_seed=1,\n", " alias=None,\n", " optimizer=None,\n", @@ -188,7 +190,9 @@ "\n", " # DataModule arguments\n", " self.num_workers_loader = num_workers_loader\n", + " self.prefetch_factor = prefetch_factor\n", " self.drop_last_loader = drop_last_loader\n", + " self.pin_memory = pin_memory\n", " # used by on_validation_epoch_end hook\n", " self.validation_step_outputs = []\n", " self.alias = alias\n", diff --git a/nbs/models.autoformer.ipynb b/nbs/models.autoformer.ipynb index 68998292a..f28597a5d 100644 --- a/nbs/models.autoformer.ipynb +++ b/nbs/models.autoformer.ipynb @@ -456,7 +456,9 @@ " `scaler_type`: str='robust', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", " `random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
\n", " `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", + " `prefetch_factor`: int=None, number of batches to be prefetched by each worker.<br>
\n", " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", + " `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
\n", " `alias`: str, optional, Custom name of the model.
\n", " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n", @@ -506,7 +508,9 @@ " scaler_type: str = 'identity',\n", " random_seed: int = 1,\n", " num_workers_loader: int = 0,\n", + " prefetch_factor: Optional[int] = None,\n", " drop_last_loader: bool = False,\n", + " pin_memory: bool = False,\n", " optimizer = None,\n", " optimizer_kwargs = None,\n", " lr_scheduler = None,\n", @@ -533,7 +537,9 @@ " step_size=step_size,\n", " scaler_type=scaler_type,\n", " num_workers_loader=num_workers_loader,\n", + " prefetch_factor=prefetch_factor,\n", " drop_last_loader=drop_last_loader,\n", + " pin_memory=pin_memory,\n", " random_seed=random_seed,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", diff --git a/nbs/models.bitcn.ipynb b/nbs/models.bitcn.ipynb index 7fd0d4b5c..049f90abb 100644 --- a/nbs/models.bitcn.ipynb +++ b/nbs/models.bitcn.ipynb @@ -176,7 +176,9 @@ " `scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", " `random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
\n", " `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", + " `prefetch_factor`: int=None, number of batches to be prefetched by each worker.<br>
\n", " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", + " `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
\n", " `alias`: str, optional, Custom name of the model.
\n", " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n", @@ -219,7 +221,9 @@ " scaler_type: str = 'identity',\n", " random_seed: int = 1,\n", " num_workers_loader: int = 0,\n", + " prefetch_factor: Optional[int] = None,\n", " drop_last_loader: bool = False,\n", + " pin_memory: bool = False,\n", " optimizer = None,\n", " optimizer_kwargs = None,\n", " lr_scheduler = None,\n", @@ -248,7 +252,9 @@ " scaler_type=scaler_type,\n", " random_seed=random_seed,\n", " num_workers_loader=num_workers_loader,\n", + " prefetch_factor=prefetch_factor,\n", " drop_last_loader=drop_last_loader,\n", + " pin_memory=pin_memory,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", " lr_scheduler=lr_scheduler,\n", diff --git a/nbs/models.deepar.ipynb b/nbs/models.deepar.ipynb index af0ec1abf..f6faff294 100644 --- a/nbs/models.deepar.ipynb +++ b/nbs/models.deepar.ipynb @@ -181,7 +181,9 @@ " `scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", " `random_seed`: int, random_seed for pytorch initializer and numpy generators.
\n", " `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", + " `prefetch_factor`: int=None, number of batches to be prefetched by each worker.<br>
\n", " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", + " `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
\n", " `alias`: str, optional, Custom name of the model.
\n", " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n", @@ -229,7 +231,9 @@ " scaler_type: str = 'identity',\n", " random_seed: int = 1,\n", " num_workers_loader = 0,\n", + " prefetch_factor: Optional[int] = None,\n", " drop_last_loader = False,\n", + " pin_memory = False,\n", " optimizer = None,\n", " optimizer_kwargs = None,\n", " lr_scheduler = None,\n", @@ -270,7 +274,9 @@ " step_size=step_size,\n", " scaler_type=scaler_type,\n", " num_workers_loader=num_workers_loader,\n", + " prefetch_factor=prefetch_factor,\n", " drop_last_loader=drop_last_loader,\n", + " pin_memory=pin_memory,\n", " random_seed=random_seed,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", diff --git a/nbs/models.deepnpts.ipynb b/nbs/models.deepnpts.ipynb index 78a1bc7c1..be1a50ba9 100644 --- a/nbs/models.deepnpts.ipynb +++ b/nbs/models.deepnpts.ipynb @@ -119,7 +119,9 @@ " `scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", " `random_seed`: int, random_seed for pytorch initializer and numpy generators.
\n", " `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", + " `prefetch_factor`: int=None, number of batches to be prefetched by each worker.<br>
\n", " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", + " `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
\n", " `alias`: str, optional, Custom name of the model.
\n", " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n", @@ -164,7 +166,9 @@ " scaler_type: str = 'standard',\n", " random_seed: int = 1,\n", " num_workers_loader = 0,\n", + " prefetch_factor: Optional[int] = None,\n", " drop_last_loader = False,\n", + " pin_memory = False,\n", " optimizer = None,\n", " optimizer_kwargs = None,\n", " lr_scheduler = None,\n", @@ -202,7 +206,9 @@ " step_size=step_size,\n", " scaler_type=scaler_type,\n", " num_workers_loader=num_workers_loader,\n", + " prefetch_factor=prefetch_factor,\n", " drop_last_loader=drop_last_loader,\n", + " pin_memory=pin_memory,\n", " random_seed=random_seed,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", diff --git a/nbs/models.dilated_rnn.ipynb b/nbs/models.dilated_rnn.ipynb index 12af1b4c3..6510e6287 100644 --- a/nbs/models.dilated_rnn.ipynb +++ b/nbs/models.dilated_rnn.ipynb @@ -388,7 +388,9 @@ " `scaler_type`: str='robust', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", " `random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
\n", " `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", + " 'prefetch_factor': int=None, number of batches to be prefetched by the worker.
\n", " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", + " `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
\n", " `alias`: str, optional, Custom name of the model.
\n", " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n", @@ -428,7 +430,9 @@ " scaler_type: str = 'robust',\n", " random_seed: int = 1,\n", " num_workers_loader: int = 0,\n", + " prefetch_factor: Optional[int] = None,\n", " drop_last_loader: bool = False,\n", + " pin_memory: bool = False,\n", " optimizer = None,\n", " optimizer_kwargs = None,\n", " lr_scheduler = None,\n", @@ -452,7 +456,9 @@ " hist_exog_list=hist_exog_list,\n", " stat_exog_list=stat_exog_list,\n", " num_workers_loader=num_workers_loader,\n", + " prefetch_factor=prefetch_factor,\n", " drop_last_loader=drop_last_loader,\n", + " pin_memory=pin_memory,\n", " random_seed=random_seed,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", diff --git a/nbs/models.dlinear.ipynb b/nbs/models.dlinear.ipynb index bf9abcfaf..69f4ef666 100644 --- a/nbs/models.dlinear.ipynb +++ b/nbs/models.dlinear.ipynb @@ -160,7 +160,9 @@ " `scaler_type`: str='robust', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", " `random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
\n", " `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", + " 'prefetch_factor': int=None, number of batches to be prefetched by the worker.
\n", " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", + " `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
\n", " `alias`: str, optional, Custom name of the model.
\n", " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n", @@ -201,7 +203,9 @@ " scaler_type: str = 'identity',\n", " random_seed: int = 1,\n", " num_workers_loader: int = 0,\n", + " prefetch_factor: Optional[int] = None,\n", " drop_last_loader: bool = False,\n", + " pin_memory: bool = False,\n", " optimizer = None,\n", " optimizer_kwargs = None,\n", " lr_scheduler = None,\n", @@ -228,7 +232,9 @@ " step_size=step_size,\n", " scaler_type=scaler_type,\n", " num_workers_loader=num_workers_loader,\n", + " prefetch_factor=prefetch_factor,\n", " drop_last_loader=drop_last_loader,\n", + " pin_memory=pin_memory,\n", " random_seed=random_seed,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", diff --git a/nbs/models.fedformer.ipynb b/nbs/models.fedformer.ipynb index f7c5e9f14..2b27e82de 100644 --- a/nbs/models.fedformer.ipynb +++ b/nbs/models.fedformer.ipynb @@ -449,7 +449,9 @@ " `scaler_type`: str='robust', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", " `random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
\n", " `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", + " 'prefetch_factor': int=None, number of batches to be prefetched by the worker.
\n", " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", + " `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
\n", " `alias`: str, optional, Custom name of the model.
\n", " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n", @@ -498,7 +500,9 @@ " scaler_type: str = 'identity',\n", " random_seed: int = 1,\n", " num_workers_loader: int = 0,\n", + " prefetch_factor: Optional[int] = None,\n", " drop_last_loader: bool = False,\n", + " pin_memory: bool = False,\n", " optimizer=None,\n", " optimizer_kwargs=None,\n", " lr_scheduler = None,\n", @@ -524,7 +528,9 @@ " step_size=step_size,\n", " scaler_type=scaler_type,\n", " num_workers_loader=num_workers_loader,\n", + " prefetch_factor=prefetch_factor,\n", " drop_last_loader=drop_last_loader,\n", + " pin_memory=pin_memory,\n", " random_seed=random_seed,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", diff --git a/nbs/models.gru.ipynb b/nbs/models.gru.ipynb index 8d83bffb7..155e53f92 100644 --- a/nbs/models.gru.ipynb +++ b/nbs/models.gru.ipynb @@ -122,7 +122,9 @@ " `scaler_type`: str='robust', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", " `random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
\n", " `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", + " 'prefetch_factor': int=None, number of batches to be prefetched by the worker.
\n", " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", + " `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
\n", " `alias`: str, optional, Custom name of the model.
\n", " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n", @@ -163,7 +165,9 @@ " scaler_type: str='robust',\n", " random_seed=1,\n", " num_workers_loader=0,\n", + " prefetch_factor: Optional[int] = None,\n", " drop_last_loader = False,\n", + " pin_memory = False,\n", " optimizer = None,\n", " optimizer_kwargs = None,\n", " lr_scheduler = None,\n", @@ -187,7 +191,9 @@ " hist_exog_list=hist_exog_list,\n", " stat_exog_list=stat_exog_list,\n", " num_workers_loader=num_workers_loader,\n", + " prefetch_factor=prefetch_factor,\n", " drop_last_loader=drop_last_loader,\n", + " pin_memory=pin_memory,\n", " random_seed=random_seed,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", diff --git a/nbs/models.informer.ipynb b/nbs/models.informer.ipynb index c33fb8509..2da2ba79c 100644 --- a/nbs/models.informer.ipynb +++ b/nbs/models.informer.ipynb @@ -304,7 +304,9 @@ " `scaler_type`: str='robust', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", " `random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
\n", " `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", + " 'prefetch_factor': int=None, number of batches to be prefetched by the worker.
\n", " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", + " `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
\n", " `alias`: str, optional, Custom name of the model.
\n", " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n", @@ -354,7 +356,9 @@ " scaler_type: str = 'identity',\n", " random_seed: int = 1,\n", " num_workers_loader: int = 0,\n", + " prefetch_factor: Optional[int] = None,\n", " drop_last_loader: bool = False,\n", + " pin_memory: bool = False,\n", " optimizer = None,\n", " optimizer_kwargs = None,\n", " lr_scheduler = None,\n", @@ -381,7 +385,9 @@ " step_size=step_size,\n", " scaler_type=scaler_type,\n", " num_workers_loader=num_workers_loader,\n", + " prefetch_factor=prefetch_factor,\n", " drop_last_loader=drop_last_loader,\n", + " pin_memory=pin_memory,\n", " random_seed=random_seed,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", diff --git a/nbs/models.itransformer.ipynb b/nbs/models.itransformer.ipynb index 15fe45e7e..850511825 100644 --- a/nbs/models.itransformer.ipynb +++ b/nbs/models.itransformer.ipynb @@ -195,6 +195,9 @@ "source": [ "#| export\n", "\n", + "from typing import Optional\n", + "\n", + "\n", "class iTransformer(BaseMultivariate):\n", "\n", " \"\"\" iTransformer\n", @@ -226,7 +229,9 @@ " `scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", " `random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
\n", " `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", + " 'prefetch_factor': int=None, number of batches to be prefetched by the worker.
\n", " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", + " `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
\n", " `alias`: str, optional, Custom name of the model.
\n", " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n", @@ -271,7 +276,9 @@ " scaler_type: str = 'identity',\n", " random_seed: int = 1,\n", " num_workers_loader: int = 0,\n", + " prefetch_factor: Optional[int] = None,\n", " drop_last_loader: bool = False,\n", + " pin_memory: bool = False,\n", " optimizer = None,\n", " optimizer_kwargs = None,\n", " lr_scheduler = None,\n", @@ -296,7 +303,9 @@ " scaler_type=scaler_type,\n", " random_seed=random_seed,\n", " num_workers_loader=num_workers_loader,\n", + " prefetch_factor=prefetch_factor,\n", " drop_last_loader=drop_last_loader,\n", + " pin_memory=pin_memory,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", " lr_scheduler=lr_scheduler,\n", diff --git a/nbs/models.kan.ipynb b/nbs/models.kan.ipynb index 91d078865..3900ada2f 100644 --- a/nbs/models.kan.ipynb +++ b/nbs/models.kan.ipynb @@ -360,7 +360,9 @@ " `scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", " `random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
\n", " `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", + " 'prefetch_factor': int=None, number of batches to be prefetched by the worker.
\n", " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", + " `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
\n", " `alias`: str, optional, Custom name of the model.
\n", " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n", @@ -409,7 +411,9 @@ " scaler_type: str = 'identity',\n", " random_seed: int = 1,\n", " num_workers_loader: int = 0,\n", + " prefetch_factor: Optional[int] = None,\n", " drop_last_loader: bool = False,\n", + " pin_memory: bool = False,\n", " optimizer = None,\n", " optimizer_kwargs = None,\n", " **trainer_kwargs):\n", @@ -436,7 +440,9 @@ " step_size=step_size,\n", " scaler_type=scaler_type,\n", " num_workers_loader=num_workers_loader,\n", + " prefetch_factor=prefetch_factor,\n", " drop_last_loader=drop_last_loader,\n", + " pin_memory=pin_memory,\n", " random_seed=random_seed,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", diff --git a/nbs/models.lstm.ipynb b/nbs/models.lstm.ipynb index c1f32749f..922662c8d 100644 --- a/nbs/models.lstm.ipynb +++ b/nbs/models.lstm.ipynb @@ -119,7 +119,9 @@ " `scaler_type`: str='robust', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", " `random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
\n", " `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", + " 'prefetch_factor': int=None, number of batches to be prefetched by the worker.
\n", " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", + " `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
\n", " `alias`: str, optional, Custom name of the model.
\n", " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n", @@ -159,7 +161,9 @@ " scaler_type: str = 'robust',\n", " random_seed = 1,\n", " num_workers_loader = 0,\n", + " prefetch_factor: Optional[int] = None,\n", " drop_last_loader = False,\n", + " pin_memory = False,\n", " optimizer = None,\n", " optimizer_kwargs = None,\n", " lr_scheduler = None,\n", @@ -183,7 +187,9 @@ " hist_exog_list=hist_exog_list,\n", " stat_exog_list=stat_exog_list,\n", " num_workers_loader=num_workers_loader,\n", + " prefetch_factor=prefetch_factor,\n", " drop_last_loader=drop_last_loader,\n", + " pin_memory=pin_memory,\n", " random_seed=random_seed,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", diff --git a/nbs/models.mlp.ipynb b/nbs/models.mlp.ipynb index 6df1e8b86..948c6a26c 100644 --- a/nbs/models.mlp.ipynb +++ b/nbs/models.mlp.ipynb @@ -112,7 +112,9 @@ " `scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", " `random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
\n", " `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", + " 'prefetch_factor': int=None, number of batches to be prefetched by the worker.
\n", " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", + " `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
\n", " `alias`: str, optional, Custom name of the model.
\n", " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n", @@ -151,7 +153,9 @@ " scaler_type: str = 'identity',\n", " random_seed: int = 1,\n", " num_workers_loader: int = 0,\n", + " prefetch_factor: Optional[int] = None,\n", " drop_last_loader: bool = False,\n", + " pin_memory: bool = False,\n", " optimizer = None,\n", " optimizer_kwargs = None,\n", " lr_scheduler = None,\n", @@ -180,7 +184,9 @@ " step_size=step_size,\n", " scaler_type=scaler_type,\n", " num_workers_loader=num_workers_loader,\n", + " prefetch_factor=prefetch_factor,\n", " drop_last_loader=drop_last_loader,\n", + " pin_memory=pin_memory,\n", " random_seed=random_seed,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", diff --git a/nbs/models.mlpmultivariate.ipynb b/nbs/models.mlpmultivariate.ipynb index ad1a08f47..d67875651 100644 --- a/nbs/models.mlpmultivariate.ipynb +++ b/nbs/models.mlpmultivariate.ipynb @@ -76,6 +76,9 @@ "outputs": [], "source": [ "#| export\n", + "from typing import Optional\n", + "\n", + "\n", "class MLPMultivariate(BaseMultivariate):\n", " \"\"\" MLPMultivariate\n", "\n", @@ -106,7 +109,9 @@ " `scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", " `random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
\n", " `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", + " 'prefetch_factor': int=None, number of batches to be prefetched by the worker.
\n", " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", + " `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
\n", " `alias`: str, optional, Custom name of the model.
\n", " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n", @@ -141,7 +146,9 @@ " scaler_type: str = 'identity',\n", " random_seed: int = 1,\n", " num_workers_loader: int = 0,\n", + " prefetch_factor: Optional[int] = None,\n", " drop_last_loader: bool = False,\n", + " pin_memory: bool = False,\n", " optimizer = None,\n", " optimizer_kwargs = None,\n", " lr_scheduler = None,\n", @@ -166,7 +173,9 @@ " step_size=step_size,\n", " scaler_type=scaler_type,\n", " num_workers_loader=num_workers_loader,\n", + " prefetch_factor=prefetch_factor,\n", " drop_last_loader=drop_last_loader,\n", + " pin_memory=pin_memory,\n", " random_seed=random_seed,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", diff --git a/nbs/models.nbeats.ipynb b/nbs/models.nbeats.ipynb index 69cf93904..aed5c93cc 100644 --- a/nbs/models.nbeats.ipynb +++ b/nbs/models.nbeats.ipynb @@ -268,7 +268,9 @@ " `scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", " `random_seed`: int, random_seed for pytorch initializer and numpy generators.
\n", " `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", + " 'prefetch_factor': int=None, number of batches to be prefetched by the worker.
\n", " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", + " `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
\n", " `alias`: str, optional, Custom name of the model.
\n", " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n", @@ -313,7 +315,9 @@ " scaler_type: str ='identity',\n", " random_seed: int = 1,\n", " num_workers_loader: int = 0,\n", + " prefetch_factor: Optional[int] = None,\n", " drop_last_loader: bool = False,\n", + " pin_memory: bool = False,\n", " optimizer = None,\n", " optimizer_kwargs = None,\n", " lr_scheduler = None,\n", @@ -344,7 +348,9 @@ " step_size=step_size,\n", " scaler_type=scaler_type,\n", " num_workers_loader=num_workers_loader,\n", + " prefetch_factor=prefetch_factor,\n", " drop_last_loader=drop_last_loader,\n", + " pin_memory=pin_memory,\n", " random_seed=random_seed,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", diff --git a/nbs/models.nbeatsx.ipynb b/nbs/models.nbeatsx.ipynb index be8c810e9..0045ccfd9 100644 --- a/nbs/models.nbeatsx.ipynb +++ b/nbs/models.nbeatsx.ipynb @@ -412,7 +412,9 @@ " `scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", " `random_seed`: int, random seed initialization for replicability.
\n", " `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", + " 'prefetch_factor': int=None, number of batches to be prefetched by the worker.
\n", " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", + " `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
\n", " `alias`: str, optional, Custom name of the model.
\n", " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n", @@ -463,7 +465,9 @@ " scaler_type: str = \"identity\",\n", " random_seed: int = 1,\n", " num_workers_loader: int = 0,\n", + " prefetch_factor: Optional[int] = None,\n", " drop_last_loader: bool = False,\n", + " pin_memory: bool = False,\n", " optimizer = None,\n", " optimizer_kwargs = None,\n", " lr_scheduler = None,\n", @@ -498,7 +502,9 @@ " step_size = step_size,\n", " scaler_type=scaler_type,\n", " num_workers_loader=num_workers_loader,\n", + " prefetch_factor=prefetch_factor,\n", " drop_last_loader=drop_last_loader,\n", + " pin_memory=pin_memory,\n", " random_seed=random_seed,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", diff --git a/nbs/models.nhits.ipynb b/nbs/models.nhits.ipynb index b1b1b5c37..843cbb5c9 100644 --- a/nbs/models.nhits.ipynb +++ b/nbs/models.nhits.ipynb @@ -301,7 +301,9 @@ " `scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", " `random_seed`: int, random_seed for pytorch initializer and numpy generators.
\n", " `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", + " 'prefetch_factor': int=None, number of batches to be prefetched by the worker.
\n", " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", + " `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
\n", " `alias`: str, optional, Custom name of the model.
\n", " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n", @@ -352,7 +354,9 @@ " scaler_type: str = 'identity',\n", " random_seed: int = 1,\n", " num_workers_loader = 0,\n", + " prefetch_factor: Optional[int] = None,\n", " drop_last_loader = False,\n", + " pin_memory = False,\n", " optimizer = None,\n", " optimizer_kwargs = None,\n", " lr_scheduler = None,\n", @@ -381,7 +385,9 @@ " step_size=step_size,\n", " scaler_type=scaler_type,\n", " num_workers_loader=num_workers_loader,\n", + " prefetch_factor=prefetch_factor,\n", " drop_last_loader=drop_last_loader,\n", + " pin_memory=pin_memory,\n", " random_seed=random_seed,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", diff --git a/nbs/models.nlinear.ipynb b/nbs/models.nlinear.ipynb index 09671cef9..a053e3cbe 100644 --- a/nbs/models.nlinear.ipynb +++ b/nbs/models.nlinear.ipynb @@ -100,7 +100,9 @@ " `scaler_type`: str='robust', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", " `random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
\n", " `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", + " 'prefetch_factor': int=None, number of batches to be prefetched by the worker.
\n", " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", + " `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
\n", " `alias`: str, optional, Custom name of the model.
\n", " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n", @@ -140,7 +142,9 @@ " scaler_type: str = 'identity',\n", " random_seed: int = 1,\n", " num_workers_loader: int = 0,\n", + " prefetch_factor: Optional[int] = None,\n", " drop_last_loader: bool = False,\n", + " pin_memory: bool = False,\n", " optimizer = None,\n", " optimizer_kwargs = None,\n", " lr_scheduler = None,\n", @@ -167,7 +171,9 @@ " step_size=step_size,\n", " scaler_type=scaler_type,\n", " num_workers_loader=num_workers_loader,\n", + " prefetch_factor=prefetch_factor,\n", " drop_last_loader=drop_last_loader,\n", + " pin_memory=pin_memory,\n", " random_seed=random_seed,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", diff --git a/nbs/models.patchtst.ipynb b/nbs/models.patchtst.ipynb index ff579bf86..888b3bbc1 100644 --- a/nbs/models.patchtst.ipynb +++ b/nbs/models.patchtst.ipynb @@ -660,7 +660,9 @@ " `scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", " `random_seed`: int, random_seed for pytorch initializer and numpy generators.
\n", " `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", + " 'prefetch_factor': int=None, number of batches to be prefetched by the worker.
\n", " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", + " `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
\n", " `alias`: str, optional, Custom name of the model.
\n", " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n", @@ -717,7 +719,9 @@ " scaler_type: str = 'identity',\n", " random_seed: int = 1,\n", " num_workers_loader: int = 0,\n", + " prefetch_factor: Optional[int] = None,\n", " drop_last_loader: bool = False,\n", + " pin_memory: bool = False,\n", " optimizer = None,\n", " optimizer_kwargs = None,\n", " lr_scheduler = None,\n", @@ -744,7 +748,9 @@ " step_size=step_size,\n", " scaler_type=scaler_type,\n", " num_workers_loader=num_workers_loader,\n", + " prefetch_factor=prefetch_factor,\n", " drop_last_loader=drop_last_loader,\n", + " pin_memory=pin_memory,\n", " random_seed=random_seed,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", diff --git a/nbs/models.rmok.ipynb b/nbs/models.rmok.ipynb index 7f4160e09..74da2ce5d 100644 --- a/nbs/models.rmok.ipynb +++ b/nbs/models.rmok.ipynb @@ -331,6 +331,9 @@ "source": [ "#| export\n", "\n", + "from typing import Optional\n", + "\n", + "\n", "class RMoK(BaseMultivariate):\n", " \"\"\" Reversible Mixture of KAN\n", " **Parameters**
\n", @@ -357,7 +360,9 @@ " `scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", " `random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
\n", " `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", + " 'prefetch_factor': int=None, number of batches to be prefetched by the worker.
\n", " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", + " `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
\n", " `alias`: str, optional, Custom name of the model.
\n", " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n", @@ -399,7 +404,9 @@ " scaler_type: str = 'identity',\n", " random_seed: int = 1,\n", " num_workers_loader: int = 0,\n", + " prefetch_factor: Optional[int] = None,\n", " drop_last_loader: bool = False,\n", + " pin_memory: bool = False,\n", " optimizer = None,\n", " optimizer_kwargs = None,\n", " lr_scheduler = None,\n", @@ -424,7 +431,9 @@ " scaler_type=scaler_type,\n", " random_seed=random_seed,\n", " num_workers_loader=num_workers_loader,\n", + " prefetch_factor=prefetch_factor,\n", " drop_last_loader=drop_last_loader,\n", + " pin_memory=pin_memory,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", " lr_scheduler=lr_scheduler,\n", diff --git a/nbs/models.rnn.ipynb b/nbs/models.rnn.ipynb index b5123b12f..7da5c274d 100644 --- a/nbs/models.rnn.ipynb +++ b/nbs/models.rnn.ipynb @@ -124,7 +124,9 @@ " `scaler_type`: str='robust', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", " `random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
\n", " `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", + " 'prefetch_factor': int=None, number of batches to be prefetched by the worker.
\n", " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", + " `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
\n", " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n", " `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
\n", @@ -166,7 +168,9 @@ " scaler_type: str='robust',\n", " random_seed=1,\n", " num_workers_loader=0,\n", + " prefetch_factor: Optional[int] = None,\n", " drop_last_loader=False,\n", + " pin_memory=False,\n", " optimizer=None,\n", " optimizer_kwargs=None,\n", " lr_scheduler = None,\n", @@ -190,7 +194,9 @@ " hist_exog_list=hist_exog_list,\n", " stat_exog_list=stat_exog_list,\n", " num_workers_loader=num_workers_loader,\n", + " prefetch_factor=prefetch_factor,\n", " drop_last_loader=drop_last_loader,\n", + " pin_memory=pin_memory,\n", " random_seed=random_seed,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", diff --git a/nbs/models.softs.ipynb b/nbs/models.softs.ipynb index a26e2932e..9e0603a9a 100644 --- a/nbs/models.softs.ipynb +++ b/nbs/models.softs.ipynb @@ -169,6 +169,9 @@ "source": [ "#| export\n", "\n", + "from typing import Optional\n", + "\n", + "\n", "class SOFTS(BaseMultivariate):\n", "\n", " \"\"\" SOFTS\n", @@ -198,7 +201,9 @@ " `scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", " `random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
\n", " `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", + " 'prefetch_factor': int=None, number of batches to be prefetched by the worker.
\n", " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", + " `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
\n", " `alias`: str, optional, Custom name of the model.
\n", " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n", @@ -241,7 +246,9 @@ " scaler_type: str = 'identity',\n", " random_seed: int = 1,\n", " num_workers_loader: int = 0,\n", + " prefetch_factor: Optional[int] = None,\n", " drop_last_loader: bool = False,\n", + " pin_memory: bool = False,\n", " optimizer = None,\n", " optimizer_kwargs = None,\n", " lr_scheduler = None,\n", @@ -266,7 +273,9 @@ " scaler_type=scaler_type,\n", " random_seed=random_seed,\n", " num_workers_loader=num_workers_loader,\n", + " prefetch_factor=prefetch_factor,\n", " drop_last_loader=drop_last_loader,\n", + " pin_memory=pin_memory,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", " lr_scheduler=lr_scheduler,\n", diff --git a/nbs/models.stemgnn.ipynb b/nbs/models.stemgnn.ipynb index 4c80cba20..acf1f4592 100644 --- a/nbs/models.stemgnn.ipynb +++ b/nbs/models.stemgnn.ipynb @@ -61,7 +61,19 @@ "cell_type": "code", "execution_count": null, "metadata": {}, - "outputs": [], + "outputs": [ + { + "ename": "ModuleNotFoundError", + "evalue": "No module named 'neuralforecast'", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mModuleNotFoundError\u001b[0m Traceback (most recent call last)", + "Cell \u001b[0;32mIn[4], line 6\u001b[0m\n\u001b[1;32m 3\u001b[0m \u001b[38;5;28;01mimport\u001b[39;00m \u001b[38;5;21;01mtorch\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mnn\u001b[39;00m \u001b[38;5;28;01mas\u001b[39;00m \u001b[38;5;21;01mnn\u001b[39;00m\n\u001b[1;32m 4\u001b[0m \u001b[38;5;28;01mimport\u001b[39;00m \u001b[38;5;21;01mtorch\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mnn\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mfunctional\u001b[39;00m \u001b[38;5;28;01mas\u001b[39;00m \u001b[38;5;21;01mF\u001b[39;00m\n\u001b[0;32m----> 6\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m 
\u001b[38;5;21;01mneuralforecast\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mlosses\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mpytorch\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m MAE\n\u001b[1;32m 7\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mneuralforecast\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mcommon\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01m_base_multivariate\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m BaseMultivariate\n", + "\u001b[0;31mModuleNotFoundError\u001b[0m: No module named 'neuralforecast'" + ] + } + ], "source": [ "#| export\n", "import torch\n", @@ -171,6 +183,9 @@ "outputs": [], "source": [ "#| export\n", + "from typing import Optional\n", + "\n", + "\n", "class StemGNN(BaseMultivariate):\n", " \"\"\" StemGNN\n", "\n", @@ -202,7 +217,9 @@ " `scaler_type`: str='robust', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", " `random_seed`: int, random_seed for pytorch initializer and numpy generators.
\n", " `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", + " 'prefetch_factor': int=None, number of batches to be prefetched by the worker.
\n", " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", + " `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
\n", " `alias`: str, optional, Custom name of the model.
\n", " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n", @@ -239,7 +256,9 @@ " scaler_type: str = 'robust',\n", " random_seed: int = 1,\n", " num_workers_loader = 0,\n", + " prefetch_factor = None,\n", " drop_last_loader = False,\n", + " pin_memory = False,\n", " optimizer = None,\n", " optimizer_kwargs = None,\n", " lr_scheduler = None,\n", @@ -264,7 +283,9 @@ " step_size=step_size,\n", " scaler_type=scaler_type,\n", " num_workers_loader=num_workers_loader,\n", + " prefetch_factor=prefetch_factor,\n", " drop_last_loader=drop_last_loader,\n", + " pin_memory=pin_memory,\n", " random_seed=random_seed,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", @@ -432,7 +453,19 @@ "cell_type": "code", "execution_count": null, "metadata": {}, - "outputs": [], + "outputs": [ + { + "ename": "NameError", + "evalue": "name 'logging' is not defined", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mNameError\u001b[0m Traceback (most recent call last)", + "Cell \u001b[0;32mIn[1], line 3\u001b[0m\n\u001b[1;32m 1\u001b[0m \u001b[38;5;66;03m#| hide\u001b[39;00m\n\u001b[1;32m 2\u001b[0m \u001b[38;5;66;03m# Test losses\u001b[39;00m\n\u001b[0;32m----> 3\u001b[0m \u001b[43mlogging\u001b[49m\u001b[38;5;241m.\u001b[39mgetLogger(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mpytorch_lightning\u001b[39m\u001b[38;5;124m\"\u001b[39m)\u001b[38;5;241m.\u001b[39msetLevel(logging\u001b[38;5;241m.\u001b[39mERROR)\n\u001b[1;32m 4\u001b[0m warnings\u001b[38;5;241m.\u001b[39mfilterwarnings(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mignore\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n\u001b[1;32m 6\u001b[0m Y_train_df \u001b[38;5;241m=\u001b[39m 
AirPassengersPanel[AirPassengersPanel\u001b[38;5;241m.\u001b[39mds\u001b[38;5;241m<\u001b[39mAirPassengersPanel[\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mds\u001b[39m\u001b[38;5;124m'\u001b[39m]\u001b[38;5;241m.\u001b[39mvalues[\u001b[38;5;241m-\u001b[39m\u001b[38;5;241m12\u001b[39m]]\u001b[38;5;241m.\u001b[39mreset_index(drop\u001b[38;5;241m=\u001b[39m\u001b[38;5;28;01mTrue\u001b[39;00m) \u001b[38;5;66;03m# 132 train\u001b[39;00m\n", + "\u001b[0;31mNameError\u001b[0m: name 'logging' is not defined" + ] + } + ], "source": [ "#| hide\n", "# Test losses\n", diff --git a/nbs/models.tcn.ipynb b/nbs/models.tcn.ipynb index 26d977ed1..cbfc3cd01 100644 --- a/nbs/models.tcn.ipynb +++ b/nbs/models.tcn.ipynb @@ -124,7 +124,9 @@ " `scaler_type`: str='robust', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", " `random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
\n", " `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", + " 'prefetch_factor': int=None, number of batches to be prefetched by the worker.
\n", " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", + " `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
\n", " `alias`: str, optional, Custom name of the model.
\n", " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n", @@ -164,7 +166,9 @@ " scaler_type: str ='robust',\n", " random_seed: int = 1,\n", " num_workers_loader = 0,\n", + " prefetch_factor: Optional[int] = None,\n", " drop_last_loader = False,\n", + " pin_memory = False,\n", " optimizer = None,\n", " optimizer_kwargs = None,\n", " lr_scheduler = None,\n", @@ -188,7 +192,9 @@ " hist_exog_list=hist_exog_list,\n", " stat_exog_list=stat_exog_list,\n", " num_workers_loader=num_workers_loader,\n", + " prefetch_factor=prefetch_factor,\n", " drop_last_loader=drop_last_loader,\n", + " pin_memory=pin_memory,\n", " random_seed=random_seed,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", diff --git a/nbs/models.tft.ipynb b/nbs/models.tft.ipynb index 2207fc64d..e3123f74f 100644 --- a/nbs/models.tft.ipynb +++ b/nbs/models.tft.ipynb @@ -702,7 +702,9 @@ " `scaler_type`: str='robust', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", " `random_seed`: int, random seed initialization for replicability.
\n", " `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", + " 'prefetch_factor': int=None, number of batches to be prefetched by the worker.
\n", " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", + " `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
\n", " `alias`: str, optional, Custom name of the model.
\n", " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n", @@ -749,7 +751,9 @@ " step_size: int = 1,\n", " scaler_type: str = \"robust\",\n", " num_workers_loader=0,\n", + " prefetch_factor: Optional[int] = None,\n", " drop_last_loader=False,\n", + " pin_memory=False,\n", " random_seed: int = 1,\n", " optimizer=None,\n", " optimizer_kwargs=None,\n", @@ -780,7 +784,9 @@ " step_size=step_size,\n", " scaler_type=scaler_type,\n", " num_workers_loader=num_workers_loader,\n", + " prefetch_factor=prefetch_factor,\n", " drop_last_loader=drop_last_loader,\n", + " pin_memory=pin_memory,\n", " random_seed=random_seed,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", diff --git a/nbs/models.tide.ipynb b/nbs/models.tide.ipynb index e59d67217..2fe4f4162 100644 --- a/nbs/models.tide.ipynb +++ b/nbs/models.tide.ipynb @@ -165,7 +165,9 @@ " `scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", " `random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
\n", " `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", + " 'prefetch_factor': int=None, number of batches to be prefetched by the worker.
\n", " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", + " `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
\n", " `alias`: str, optional, Custom name of the model.
\n", " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n", @@ -214,7 +216,9 @@ " scaler_type: str = 'identity',\n", " random_seed: int = 1,\n", " num_workers_loader: int = 0,\n", + " prefetch_factor: Optional[int] = None,\n", " drop_last_loader: bool = False,\n", + " pin_memory: bool = False,\n", " optimizer = None,\n", " optimizer_kwargs = None,\n", " lr_scheduler = None,\n", @@ -245,7 +249,9 @@ " scaler_type=scaler_type,\n", " random_seed=random_seed,\n", " num_workers_loader=num_workers_loader,\n", + " prefetch_factor=prefetch_factor,\n", " drop_last_loader=drop_last_loader,\n", + " pin_memory=pin_memory,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", " lr_scheduler=lr_scheduler,\n", diff --git a/nbs/models.timellm.ipynb b/nbs/models.timellm.ipynb index dbc35a7bd..6cb6562a4 100755 --- a/nbs/models.timellm.ipynb +++ b/nbs/models.timellm.ipynb @@ -289,7 +289,9 @@ " `scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", " `random_seed`: int, random_seed for pytorch initializer and numpy generators.
\n", " `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", + " 'prefetch_factor': int=None, number of batches to be prefetched by the worker.
\n", " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", + " `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
\n", " `alias`: str, optional, Custom name of the model.
\n", " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n", @@ -345,7 +347,9 @@ " early_stop_patience_steps: int = -1,\n", " scaler_type: str = 'identity',\n", " num_workers_loader: int = 0,\n", + " prefetch_factor: Optional[int] = None,\n", " drop_last_loader: bool = False,\n", + " pin_memory: bool = False,\n", " random_seed: int = 1,\n", " optimizer = None,\n", " optimizer_kwargs = None,\n", @@ -372,7 +376,9 @@ " step_size=step_size,\n", " scaler_type=scaler_type,\n", " num_workers_loader=num_workers_loader,\n", + " prefetch_factor=prefetch_factor,\n", " drop_last_loader=drop_last_loader,\n", + " pin_memory=pin_memory,\n", " random_seed=random_seed,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", diff --git a/nbs/models.timemixer.ipynb b/nbs/models.timemixer.ipynb index 00e974d39..f5779bcff 100644 --- a/nbs/models.timemixer.ipynb +++ b/nbs/models.timemixer.ipynb @@ -324,6 +324,9 @@ "source": [ "#| export\n", "\n", + "from typing import Optional\n", + "\n", + "\n", "class TimeMixer(BaseMultivariate):\n", " \"\"\" TimeMixer\n", " **Parameters**
\n", @@ -358,7 +361,9 @@ " `scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", " `random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
\n", " `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", + " 'prefetch_factor': int=None, number of batches to be prefetched by the worker.
\n", " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", + " `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
\n", " `alias`: str, optional, Custom name of the model.
\n", " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n", @@ -408,7 +413,9 @@ " scaler_type: str = 'identity',\n", " random_seed: int = 1,\n", " num_workers_loader: int = 0,\n", + " prefetch_factor: Optional[int] = None,\n", " drop_last_loader: bool = False,\n", + " pin_memory: bool = False,\n", " optimizer = None,\n", " optimizer_kwargs = None,\n", " lr_scheduler = None,\n", @@ -433,7 +440,9 @@ " scaler_type=scaler_type,\n", " random_seed=random_seed,\n", " num_workers_loader=num_workers_loader,\n", + " prefetch_factor=prefetch_factor,\n", " drop_last_loader=drop_last_loader,\n", + " pin_memory=pin_memory,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", " lr_scheduler=lr_scheduler,\n", diff --git a/nbs/models.timesnet.ipynb b/nbs/models.timesnet.ipynb index 80c9d5ddc..5a06ca67c 100644 --- a/nbs/models.timesnet.ipynb +++ b/nbs/models.timesnet.ipynb @@ -261,8 +261,12 @@ " Random_seed for pytorch initializer and numpy generators.\n", " num_workers_loader : int (default=0)\n", " Workers to be used by `TimeSeriesDataLoader`.\n", + " 'prefetch_factor': int (default=None) \n", + " Number of batches to be prefetched by the worker.\n", " drop_last_loader : bool (default=False)\n", " If True `TimeSeriesDataLoader` drops last non-full batch.\n", + " `pin_memory`: bool (default=False) \n", + " If True `TimeSeriesDataLoader` uses pinned memory.\n", " `optimizer`: Subclass of 'torch.optim.Optimizer', optional (default=None)\n", " User specified optimizer instead of the default choice (Adam).\n", " `optimizer_kwargs`: dict, optional (defualt=None)\n", @@ -311,7 +315,9 @@ " scaler_type: str = 'standard',\n", " random_seed: int = 1,\n", " num_workers_loader: int = 0,\n", + " prefetch_factor: Optional[int] = None,\n", " drop_last_loader: bool = False,\n", + " pin_memory: bool = False,\n", " optimizer = None,\n", " optimizer_kwargs = None,\n", " lr_scheduler = None,\n", @@ -338,7 +344,9 @@ " step_size=step_size,\n", " scaler_type=scaler_type,\n", " num_workers_loader=num_workers_loader,\n", + " 
prefetch_factor=prefetch_factor,\n", " drop_last_loader=drop_last_loader,\n", + " pin_memory=pin_memory,\n", " random_seed=random_seed,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", diff --git a/nbs/models.tsmixer.ipynb b/nbs/models.tsmixer.ipynb index 58971e63e..324c9c8fd 100644 --- a/nbs/models.tsmixer.ipynb +++ b/nbs/models.tsmixer.ipynb @@ -220,6 +220,9 @@ "outputs": [], "source": [ "#| export\n", + "from typing import Optional\n", + "\n", + "\n", "class TSMixer(BaseMultivariate):\n", " \"\"\" TSMixer\n", "\n", @@ -248,7 +251,9 @@ " `scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", " `random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
\n", " `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", + " 'prefetch_factor': int=None, number of batches to be prefetched by the worker.
\n", " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", + " `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
\n", " `alias`: str, optional, Custom name of the model.
\n", " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n", @@ -289,7 +294,9 @@ " scaler_type: str = 'identity',\n", " random_seed: int = 1,\n", " num_workers_loader: int = 0,\n", + " prefetch_factor: Optional[int] = None,\n", " drop_last_loader: bool = False,\n", + " pin_memory: bool = False,\n", " optimizer = None,\n", " optimizer_kwargs = None,\n", " lr_scheduler = None,\n", @@ -315,7 +322,9 @@ " scaler_type=scaler_type,\n", " random_seed=random_seed,\n", " num_workers_loader=num_workers_loader,\n", + " prefetch_factor=prefetch_factor,\n", " drop_last_loader=drop_last_loader,\n", + " pin_memory=pin_memory,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", " lr_scheduler=lr_scheduler,\n", diff --git a/nbs/models.tsmixerx.ipynb b/nbs/models.tsmixerx.ipynb index e27f59be0..ca4dd0269 100644 --- a/nbs/models.tsmixerx.ipynb +++ b/nbs/models.tsmixerx.ipynb @@ -244,6 +244,9 @@ "outputs": [], "source": [ "#| export\n", + "from typing import Optional\n", + "\n", + "\n", "class TSMixerx(BaseMultivariate):\n", " \"\"\" TSMixerx\n", "\n", @@ -272,7 +275,9 @@ " `scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", " `random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
\n", " `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", + " `prefetch_factor`: int=None, number of batches to be prefetched by the worker.<br>
\n", " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", + " `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
\n", " `alias`: str, optional, Custom name of the model.
\n", " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n", @@ -313,7 +318,9 @@ " scaler_type: str = 'identity',\n", " random_seed: int = 1,\n", " num_workers_loader: int = 0,\n", + " prefetch_factor: Optional[int] = None,\n", " drop_last_loader: bool = False,\n", + " pin_memory: bool = False,\n", " optimizer = None,\n", " optimizer_kwargs = None,\n", " lr_scheduler = None,\n", @@ -339,7 +346,9 @@ " scaler_type=scaler_type,\n", " random_seed=random_seed,\n", " num_workers_loader=num_workers_loader,\n", + " prefetch_factor=prefetch_factor,\n", " drop_last_loader=drop_last_loader,\n", + " pin_memory=pin_memory,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", " lr_scheduler=lr_scheduler,\n", diff --git a/nbs/models.vanillatransformer.ipynb b/nbs/models.vanillatransformer.ipynb index cea633eec..75df8bcbd 100644 --- a/nbs/models.vanillatransformer.ipynb +++ b/nbs/models.vanillatransformer.ipynb @@ -196,7 +196,9 @@ " `scaler_type`: str='robust', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", " `random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
\n", " `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", + " `prefetch_factor`: int=None, number of batches to be prefetched by the worker.<br>
\n", " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", + " `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
\n", " `alias`: str, optional, Custom name of the model.
\n", " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n", @@ -243,7 +245,9 @@ " scaler_type: str = 'identity',\n", " random_seed: int = 1,\n", " num_workers_loader: int = 0,\n", + " prefetch_factor: Optional[int] = None,\n", " drop_last_loader: bool = False,\n", + " pin_memory: bool = False,\n", " optimizer = None,\n", " optimizer_kwargs = None,\n", " lr_scheduler = None,\n", @@ -269,7 +273,9 @@ " step_size=step_size,\n", " scaler_type=scaler_type,\n", " num_workers_loader=num_workers_loader,\n", + " prefetch_factor=prefetch_factor,\n", " drop_last_loader=drop_last_loader,\n", + " pin_memory=pin_memory,\n", " random_seed=random_seed,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", diff --git a/nbs/tsdataset.ipynb b/nbs/tsdataset.ipynb index da51ac93c..6c7964a25 100644 --- a/nbs/tsdataset.ipynb +++ b/nbs/tsdataset.ipynb @@ -646,16 +646,20 @@ " batch_size=32, \n", " valid_batch_size=1024,\n", " num_workers=0,\n", + " prefetch_factor=None,\n", " drop_last=False,\n", " shuffle_train=True,\n", + " pin_memory=False\n", " ):\n", " super().__init__()\n", " self.dataset = dataset\n", " self.batch_size = batch_size\n", " self.valid_batch_size = valid_batch_size\n", " self.num_workers = num_workers\n", + " self.prefetch_factor=prefetch_factor\n", " self.drop_last = drop_last\n", " self.shuffle_train = shuffle_train\n", + " self.pin_memory = pin_memory\n", " \n", " def train_dataloader(self):\n", " loader = TimeSeriesLoader(\n", @@ -663,7 +667,9 @@ " batch_size=self.batch_size, \n", " num_workers=self.num_workers,\n", " shuffle=self.shuffle_train,\n", - " drop_last=self.drop_last\n", + " drop_last=self.drop_last,\n", + " pin_memory=self.pin_memory,\n", + " prefetch_factor=self.prefetch_factor\n", " )\n", " return loader\n", " \n", @@ -673,7 +679,9 @@ " batch_size=self.valid_batch_size, \n", " num_workers=self.num_workers,\n", " shuffle=False,\n", - " drop_last=self.drop_last\n", + " drop_last=self.drop_last,\n", + " pin_memory=self.pin_memory,\n", + " prefetch_factor=self.prefetch_factor\n", " )\n", " 
return loader\n", " \n", @@ -682,7 +690,9 @@ " self.dataset,\n", " batch_size=self.valid_batch_size, \n", " num_workers=self.num_workers,\n", - " shuffle=False\n", + " shuffle=False,\n", + " pin_memory=self.pin_memory,\n", + " prefetch_factor=self.prefetch_factor\n", " )\n", " return loader" ] diff --git a/neuralforecast/_modidx.py b/neuralforecast/_modidx.py index 25f008ce4..1d9130429 100644 --- a/neuralforecast/_modidx.py +++ b/neuralforecast/_modidx.py @@ -506,60 +506,7 @@ 'neuralforecast/losses/pytorch.py'), 'neuralforecast.losses.pytorch.weighted_average': ( 'losses.pytorch.html#weighted_average', 'neuralforecast/losses/pytorch.py')}, - 'neuralforecast.models.autoformer': { 'neuralforecast.models.autoformer.AutoCorrelation': ( 'models.autoformer.html#autocorrelation', - 'neuralforecast/models/autoformer.py'), - 'neuralforecast.models.autoformer.AutoCorrelation.__init__': ( 'models.autoformer.html#autocorrelation.__init__', - 'neuralforecast/models/autoformer.py'), - 'neuralforecast.models.autoformer.AutoCorrelation.forward': ( 'models.autoformer.html#autocorrelation.forward', - 'neuralforecast/models/autoformer.py'), - 'neuralforecast.models.autoformer.AutoCorrelation.time_delay_agg_full': ( 'models.autoformer.html#autocorrelation.time_delay_agg_full', - 'neuralforecast/models/autoformer.py'), - 'neuralforecast.models.autoformer.AutoCorrelation.time_delay_agg_inference': ( 'models.autoformer.html#autocorrelation.time_delay_agg_inference', - 'neuralforecast/models/autoformer.py'), - 'neuralforecast.models.autoformer.AutoCorrelation.time_delay_agg_training': ( 'models.autoformer.html#autocorrelation.time_delay_agg_training', - 'neuralforecast/models/autoformer.py'), - 'neuralforecast.models.autoformer.AutoCorrelationLayer': ( 'models.autoformer.html#autocorrelationlayer', - 'neuralforecast/models/autoformer.py'), - 'neuralforecast.models.autoformer.AutoCorrelationLayer.__init__': ( 'models.autoformer.html#autocorrelationlayer.__init__', - 
'neuralforecast/models/autoformer.py'), - 'neuralforecast.models.autoformer.AutoCorrelationLayer.forward': ( 'models.autoformer.html#autocorrelationlayer.forward', - 'neuralforecast/models/autoformer.py'), - 'neuralforecast.models.autoformer.Autoformer': ( 'models.autoformer.html#autoformer', - 'neuralforecast/models/autoformer.py'), - 'neuralforecast.models.autoformer.Autoformer.__init__': ( 'models.autoformer.html#autoformer.__init__', - 'neuralforecast/models/autoformer.py'), - 'neuralforecast.models.autoformer.Autoformer.forward': ( 'models.autoformer.html#autoformer.forward', - 'neuralforecast/models/autoformer.py'), - 'neuralforecast.models.autoformer.Decoder': ( 'models.autoformer.html#decoder', - 'neuralforecast/models/autoformer.py'), - 'neuralforecast.models.autoformer.Decoder.__init__': ( 'models.autoformer.html#decoder.__init__', - 'neuralforecast/models/autoformer.py'), - 'neuralforecast.models.autoformer.Decoder.forward': ( 'models.autoformer.html#decoder.forward', - 'neuralforecast/models/autoformer.py'), - 'neuralforecast.models.autoformer.DecoderLayer': ( 'models.autoformer.html#decoderlayer', - 'neuralforecast/models/autoformer.py'), - 'neuralforecast.models.autoformer.DecoderLayer.__init__': ( 'models.autoformer.html#decoderlayer.__init__', - 'neuralforecast/models/autoformer.py'), - 'neuralforecast.models.autoformer.DecoderLayer.forward': ( 'models.autoformer.html#decoderlayer.forward', - 'neuralforecast/models/autoformer.py'), - 'neuralforecast.models.autoformer.Encoder': ( 'models.autoformer.html#encoder', - 'neuralforecast/models/autoformer.py'), - 'neuralforecast.models.autoformer.Encoder.__init__': ( 'models.autoformer.html#encoder.__init__', - 'neuralforecast/models/autoformer.py'), - 'neuralforecast.models.autoformer.Encoder.forward': ( 'models.autoformer.html#encoder.forward', - 'neuralforecast/models/autoformer.py'), - 'neuralforecast.models.autoformer.EncoderLayer': ( 'models.autoformer.html#encoderlayer', - 
'neuralforecast/models/autoformer.py'), - 'neuralforecast.models.autoformer.EncoderLayer.__init__': ( 'models.autoformer.html#encoderlayer.__init__', - 'neuralforecast/models/autoformer.py'), - 'neuralforecast.models.autoformer.EncoderLayer.forward': ( 'models.autoformer.html#encoderlayer.forward', - 'neuralforecast/models/autoformer.py'), - 'neuralforecast.models.autoformer.LayerNorm': ( 'models.autoformer.html#layernorm', - 'neuralforecast/models/autoformer.py'), - 'neuralforecast.models.autoformer.LayerNorm.__init__': ( 'models.autoformer.html#layernorm.__init__', - 'neuralforecast/models/autoformer.py'), - 'neuralforecast.models.autoformer.LayerNorm.forward': ( 'models.autoformer.html#layernorm.forward', - 'neuralforecast/models/autoformer.py')}, + 'neuralforecast.models.autoformer': {}, 'neuralforecast.models.bitcn': { 'neuralforecast.models.bitcn.BiTCN': ( 'models.bitcn.html#bitcn', 'neuralforecast/models/bitcn.py'), 'neuralforecast.models.bitcn.BiTCN.__init__': ( 'models.bitcn.html#bitcn.__init__', diff --git a/neuralforecast/auto.py b/neuralforecast/auto.py index b3c85892a..f0d763220 100644 --- a/neuralforecast/auto.py +++ b/neuralforecast/auto.py @@ -1,3 +1,5 @@ +"""NeuralForecast contains user-friendly implementations of neural forecasting models that allow for easy transition of computing capabilities (GPU/CPU), computation parallelization, and hyperparameter tuning.""" + # AUTOGENERATED! DO NOT EDIT! File to edit: ../nbs/models.ipynb. # %% auto 0 diff --git a/neuralforecast/common/_base_auto.py b/neuralforecast/common/_base_auto.py index a44f86267..19af4c5c1 100644 --- a/neuralforecast/common/_base_auto.py +++ b/neuralforecast/common/_base_auto.py @@ -1,3 +1,5 @@ +"""Machine Learning forecasting methods are defined by many hyperparameters that control their behavior, with effects ranging from their speed and memory requirements to their predictive performance. For a long time, manual hyperparameter tuning prevailed. 
This approach is time-consuming, **automated hyperparameter optimization** methods have been introduced, proving more efficient than manual tuning, grid search, and random search.

The `BaseAuto` class offers shared API connections to hyperparameter optimization algorithms like [Optuna](https://docs.ray.io/en/latest/tune/examples/bayesopt_example.html), [HyperOpt](https://docs.ray.io/en/latest/tune/examples/hyperopt_example.html), [Dragonfly](https://docs.ray.io/en/latest/tune/examples/dragonfly_example.html) among others through `ray`, which gives you access to grid search, bayesian optimization and other state-of-the-art tools like hyperband.

Comprehending the impacts of hyperparameters is still a precious skill, as it can help guide the design of informed hyperparameter spaces that are faster to explore automatically.""" + # AUTOGENERATED! DO NOT EDIT! File to edit: ../../nbs/common.base_auto.ipynb. # %% auto 0 diff --git a/neuralforecast/common/_base_model.py b/neuralforecast/common/_base_model.py index ab20ca193..ab640cb7e 100644 --- a/neuralforecast/common/_base_model.py +++ b/neuralforecast/common/_base_model.py @@ -337,8 +337,10 @@ def _fit( batch_size=batch_size, valid_batch_size=valid_batch_size, num_workers=self.num_workers_loader, + prefetch_factor=self.prefetch_factor, drop_last=self.drop_last_loader, shuffle_train=shuffle_train, + pin_memory=self.pin_memory, ) if self.val_check_steps > self.max_steps: diff --git a/neuralforecast/common/_base_multivariate.py b/neuralforecast/common/_base_multivariate.py index 8a31a2637..42c32a761 100644 --- a/neuralforecast/common/_base_multivariate.py +++ b/neuralforecast/common/_base_multivariate.py @@ -1,3 +1,5 @@ +"""The `BaseWindows` class contains standard methods shared across window-based multivariate neural networks; in contrast to recurrent neural networks these models commit to a fixed sequence length input.""" + # AUTOGENERATED! DO NOT EDIT! File to edit: ../../nbs/common.base_multivariate.ipynb. 
# %% auto 0 @@ -47,7 +49,9 @@ def __init__( hist_exog_list=None, stat_exog_list=None, num_workers_loader=0, + prefetch_factor=None, drop_last_loader=False, + pin_memory=False, random_seed=1, alias=None, optimizer=None, @@ -125,7 +129,9 @@ def __init__( # DataModule arguments self.num_workers_loader = num_workers_loader + self.prefetch_factor = prefetch_factor self.drop_last_loader = drop_last_loader + self.pin_memory = pin_memory # used by on_validation_epoch_end hook self.validation_step_outputs = [] self.alias = alias diff --git a/neuralforecast/common/_base_recurrent.py b/neuralforecast/common/_base_recurrent.py index 3502c9379..2de61d8d3 100644 --- a/neuralforecast/common/_base_recurrent.py +++ b/neuralforecast/common/_base_recurrent.py @@ -47,7 +47,9 @@ def __init__( hist_exog_list=None, stat_exog_list=None, num_workers_loader=0, + prefetch_factor=None, drop_last_loader=False, + pin_memory=False, random_seed=1, alias=None, optimizer=None, @@ -118,7 +120,9 @@ def __init__( # DataModule arguments self.num_workers_loader = num_workers_loader + self.prefetch_factor = prefetch_factor self.drop_last_loader = drop_last_loader + self.pin_memory = pin_memory # used by on_validation_epoch_end hook self.validation_step_outputs = [] self.alias = alias @@ -574,6 +578,8 @@ def predict(self, dataset, step_size=1, random_seed=None, **data_module_kwargs): dataset=dataset, valid_batch_size=self.valid_batch_size, num_workers=self.num_workers_loader, + prefetch_factor=self.prefetch_factor, + pin_memory=self.pin_memory, **data_module_kwargs, ) fcsts = trainer.predict(self, datamodule=datamodule) diff --git a/neuralforecast/common/_base_windows.py b/neuralforecast/common/_base_windows.py index cee5417ac..9310d5439 100644 --- a/neuralforecast/common/_base_windows.py +++ b/neuralforecast/common/_base_windows.py @@ -1,3 +1,5 @@ +"""The `BaseWindows` class contains standard methods shared across window-based neural networks; in contrast to recurrent neural networks these models commit 
to a fixed sequence length input. The class is represented by `MLP`, and other more sophisticated architectures like `NBEATS`, and `NHITS`.""" + # AUTOGENERATED! DO NOT EDIT! File to edit: ../../nbs/common.base_windows.ipynb. # %% auto 0 @@ -50,7 +52,9 @@ def __init__( stat_exog_list=None, exclude_insample_y=False, num_workers_loader=0, + prefetch_factor=None, drop_last_loader=False, + pin_memory=False, random_seed=1, alias=None, optimizer=None, @@ -129,7 +133,9 @@ def __init__( # DataModule arguments self.num_workers_loader = num_workers_loader + self.prefetch_factor = prefetch_factor self.drop_last_loader = drop_last_loader + self.pin_memory = pin_memory # used by on_validation_epoch_end hook self.validation_step_outputs = [] self.alias = alias diff --git a/neuralforecast/common/_scalers.py b/neuralforecast/common/_scalers.py index c45b58d62..182b580e8 100644 --- a/neuralforecast/common/_scalers.py +++ b/neuralforecast/common/_scalers.py @@ -1,3 +1,5 @@ +"""Temporal normalization has proven to be essential in neural forecasting tasks, as it enables network's non-linearities to express themselves. Forecasting scaling methods take particular interest in the temporal dimension where most of the variance dwells, contrary to other deep learning techniques like `BatchNorm` that normalizes across batch and temporal dimensions, and `LayerNorm` that normalizes across the feature dimension. Currently we support the following techniques: `std`, `median`, `norm`, `norm1`, `invariant`, `revin`.""" + # AUTOGENERATED! DO NOT EDIT! File to edit: ../../nbs/common.scalers.ipynb. # %% auto 0 diff --git a/neuralforecast/core.py b/neuralforecast/core.py index 85214f57a..2df06d85a 100644 --- a/neuralforecast/core.py +++ b/neuralforecast/core.py @@ -1,3 +1,5 @@ +"""NeuralForecast contains two main components, PyTorch implementations deep learning predictive models, as well as parallelization and distributed computation utilities. 
The first component comprises low-level PyTorch model estimator classes like `models.NBEATS` and `models.RNN`. The second component is a high-level `core.NeuralForecast` wrapper class that operates with sets of time series data stored in pandas DataFrames.""" + # AUTOGENERATED! DO NOT EDIT! File to edit: ../nbs/core.ipynb. # %% auto 0 diff --git a/neuralforecast/losses/numpy.py b/neuralforecast/losses/numpy.py index fec5cec7e..630e094f4 100644 --- a/neuralforecast/losses/numpy.py +++ b/neuralforecast/losses/numpy.py @@ -1,3 +1,5 @@ +"""NeuralForecast contains a collection NumPy loss functions aimed to be used during the models' evaluation.""" + # AUTOGENERATED! DO NOT EDIT! File to edit: ../../nbs/losses.numpy.ipynb. # %% auto 0 diff --git a/neuralforecast/losses/pytorch.py b/neuralforecast/losses/pytorch.py index a713b5b31..0f972cc3a 100644 --- a/neuralforecast/losses/pytorch.py +++ b/neuralforecast/losses/pytorch.py @@ -1,3 +1,5 @@ +"""NeuralForecast contains a collection PyTorch Loss classes aimed to be used during the models' optimization.""" + # AUTOGENERATED! DO NOT EDIT! File to edit: ../../nbs/losses.pytorch.ipynb. # %% auto 0 diff --git a/neuralforecast/models/autoformer.py b/neuralforecast/models/autoformer.py index c0ead0d84..e83933051 100644 --- a/neuralforecast/models/autoformer.py +++ b/neuralforecast/models/autoformer.py @@ -440,7 +440,9 @@ class Autoformer(BaseWindows): `scaler_type`: str='robust', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
`random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
`num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
+ `prefetch_factor`: int=None, number of batches to be prefetched by the worker.<br>
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
+ `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
`alias`: str, optional, Custom name of the model.
`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
`optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
@@ -492,7 +494,9 @@ def __init__( scaler_type: str = "identity", random_seed: int = 1, num_workers_loader: int = 0, + prefetch_factor: Optional[int] = None, drop_last_loader: bool = False, + pin_memory: bool = False, optimizer=None, optimizer_kwargs=None, lr_scheduler=None, @@ -521,7 +525,9 @@ def __init__( step_size=step_size, scaler_type=scaler_type, num_workers_loader=num_workers_loader, + prefetch_factor=prefetch_factor, drop_last_loader=drop_last_loader, + pin_memory=pin_memory, random_seed=random_seed, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, diff --git a/neuralforecast/models/bitcn.py b/neuralforecast/models/bitcn.py index 4674f319b..a91e1a67f 100644 --- a/neuralforecast/models/bitcn.py +++ b/neuralforecast/models/bitcn.py @@ -114,7 +114,9 @@ class BiTCN(BaseWindows): `scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
`random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
`num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
+ `prefetch_factor`: int=None, number of batches to be prefetched by the worker.<br>
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
+ `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
`alias`: str, optional, Custom name of the model.
`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
`optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
@@ -159,7 +161,9 @@ def __init__( scaler_type: str = "identity", random_seed: int = 1, num_workers_loader: int = 0, + prefetch_factor: Optional[int] = None, drop_last_loader: bool = False, + pin_memory: bool = False, optimizer=None, optimizer_kwargs=None, lr_scheduler=None, @@ -189,7 +193,9 @@ def __init__( scaler_type=scaler_type, random_seed=random_seed, num_workers_loader=num_workers_loader, + prefetch_factor=prefetch_factor, drop_last_loader=drop_last_loader, + pin_memory=pin_memory, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, lr_scheduler=lr_scheduler, diff --git a/neuralforecast/models/deepar.py b/neuralforecast/models/deepar.py index 522311633..9fda0c8e7 100644 --- a/neuralforecast/models/deepar.py +++ b/neuralforecast/models/deepar.py @@ -85,7 +85,9 @@ class DeepAR(BaseWindows): `scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
`random_seed`: int, random_seed for pytorch initializer and numpy generators.
`num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
+ `prefetch_factor`: int=None, number of batches to be prefetched by the worker.<br>
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
+ `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
`alias`: str, optional, Custom name of the model.
`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
`optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
@@ -137,7 +139,9 @@ def __init__( scaler_type: str = "identity", random_seed: int = 1, num_workers_loader=0, + prefetch_factor: Optional[int] = None, drop_last_loader=False, + pin_memory=False, optimizer=None, optimizer_kwargs=None, lr_scheduler=None, @@ -184,7 +188,9 @@ def __init__( step_size=step_size, scaler_type=scaler_type, num_workers_loader=num_workers_loader, + prefetch_factor=prefetch_factor, drop_last_loader=drop_last_loader, + pin_memory=pin_memory, random_seed=random_seed, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, diff --git a/neuralforecast/models/deepnpts.py b/neuralforecast/models/deepnpts.py index e4766d1ea..c39b48eb4 100644 --- a/neuralforecast/models/deepnpts.py +++ b/neuralforecast/models/deepnpts.py @@ -47,7 +47,9 @@ class DeepNPTS(BaseWindows): `scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
`random_seed`: int, random_seed for pytorch initializer and numpy generators.
`num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
+ `prefetch_factor`: int=None, number of batches to be prefetched by the worker.<br>
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
+ `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
`alias`: str, optional, Custom name of the model.
`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
`optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
@@ -94,7 +96,9 @@ def __init__( scaler_type: str = "standard", random_seed: int = 1, num_workers_loader=0, + prefetch_factor: Optional[int] = None, drop_last_loader=False, + pin_memory=False, optimizer=None, optimizer_kwargs=None, lr_scheduler=None, @@ -138,7 +142,9 @@ def __init__( step_size=step_size, scaler_type=scaler_type, num_workers_loader=num_workers_loader, + prefetch_factor=prefetch_factor, drop_last_loader=drop_last_loader, + pin_memory=pin_memory, random_seed=random_seed, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, diff --git a/neuralforecast/models/dilated_rnn.py b/neuralforecast/models/dilated_rnn.py index 239a93187..006d0bb10 100644 --- a/neuralforecast/models/dilated_rnn.py +++ b/neuralforecast/models/dilated_rnn.py @@ -315,7 +315,9 @@ class DilatedRNN(BaseRecurrent): `scaler_type`: str='robust', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
`random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
`num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
+ `prefetch_factor`: int=None, number of batches to be prefetched by the worker.<br>
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
+ `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
`alias`: str, optional, Custom name of the model.
`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
`optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
@@ -357,7 +359,9 @@ def __init__( scaler_type: str = "robust", random_seed: int = 1, num_workers_loader: int = 0, + prefetch_factor: Optional[int] = None, drop_last_loader: bool = False, + pin_memory: bool = False, optimizer=None, optimizer_kwargs=None, lr_scheduler=None, @@ -382,7 +386,9 @@ def __init__( hist_exog_list=hist_exog_list, stat_exog_list=stat_exog_list, num_workers_loader=num_workers_loader, + prefetch_factor=prefetch_factor, drop_last_loader=drop_last_loader, + pin_memory=pin_memory, random_seed=random_seed, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, diff --git a/neuralforecast/models/dlinear.py b/neuralforecast/models/dlinear.py index 213f8ff4b..22dcdadc8 100644 --- a/neuralforecast/models/dlinear.py +++ b/neuralforecast/models/dlinear.py @@ -73,7 +73,9 @@ class DLinear(BaseWindows): `scaler_type`: str='robust', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
`random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
`num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
+ `prefetch_factor`: int=None, number of batches to be prefetched by the worker.<br>
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
+ `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
`alias`: str, optional, Custom name of the model.
`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
`optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
@@ -116,7 +118,9 @@ def __init__( scaler_type: str = "identity", random_seed: int = 1, num_workers_loader: int = 0, + prefetch_factor: Optional[int] = None, drop_last_loader: bool = False, + pin_memory: bool = False, optimizer=None, optimizer_kwargs=None, lr_scheduler=None, @@ -145,7 +149,9 @@ def __init__( step_size=step_size, scaler_type=scaler_type, num_workers_loader=num_workers_loader, + prefetch_factor=prefetch_factor, drop_last_loader=drop_last_loader, + pin_memory=pin_memory, random_seed=random_seed, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, diff --git a/neuralforecast/models/fedformer.py b/neuralforecast/models/fedformer.py index d811b6dce..182901994 100644 --- a/neuralforecast/models/fedformer.py +++ b/neuralforecast/models/fedformer.py @@ -438,7 +438,9 @@ class FEDformer(BaseWindows): `scaler_type`: str='robust', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
`random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
`num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
+ `prefetch_factor`: int=None, number of batches to be prefetched by the worker.<br>
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
+ `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
`alias`: str, optional, Custom name of the model.
`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
`optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
@@ -489,7 +491,9 @@ def __init__( scaler_type: str = "identity", random_seed: int = 1, num_workers_loader: int = 0, + prefetch_factor: Optional[int] = None, drop_last_loader: bool = False, + pin_memory: bool = False, optimizer=None, optimizer_kwargs=None, lr_scheduler=None, @@ -517,7 +521,9 @@ def __init__( step_size=step_size, scaler_type=scaler_type, num_workers_loader=num_workers_loader, + prefetch_factor=prefetch_factor, drop_last_loader=drop_last_loader, + pin_memory=pin_memory, random_seed=random_seed, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, diff --git a/neuralforecast/models/gru.py b/neuralforecast/models/gru.py index 10b9c891f..ee545e572 100644 --- a/neuralforecast/models/gru.py +++ b/neuralforecast/models/gru.py @@ -49,7 +49,9 @@ class GRU(BaseRecurrent): `scaler_type`: str='robust', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
`random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
`num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
+ `prefetch_factor`: int=None, number of batches to be prefetched by the worker.<br>
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
+ `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
`alias`: str, optional, Custom name of the model.
`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
`optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
@@ -92,7 +94,9 @@ def __init__( scaler_type: str = "robust", random_seed=1, num_workers_loader=0, + prefetch_factor: Optional[int] = None, drop_last_loader=False, + pin_memory=False, optimizer=None, optimizer_kwargs=None, lr_scheduler=None, @@ -117,7 +121,9 @@ def __init__( hist_exog_list=hist_exog_list, stat_exog_list=stat_exog_list, num_workers_loader=num_workers_loader, + prefetch_factor=prefetch_factor, drop_last_loader=drop_last_loader, + pin_memory=pin_memory, random_seed=random_seed, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, diff --git a/neuralforecast/models/informer.py b/neuralforecast/models/informer.py index 66ebc3c9a..2e9d44d2a 100644 --- a/neuralforecast/models/informer.py +++ b/neuralforecast/models/informer.py @@ -224,7 +224,9 @@ class Informer(BaseWindows): `scaler_type`: str='robust', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
`random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
`num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
+ `prefetch_factor`: int=None, number of batches to be prefetched by the worker.<br>
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
+ `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
`alias`: str, optional, Custom name of the model.
`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
`optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
@@ -276,7 +278,9 @@ def __init__( scaler_type: str = "identity", random_seed: int = 1, num_workers_loader: int = 0, + prefetch_factor: Optional[int] = None, drop_last_loader: bool = False, + pin_memory: bool = False, optimizer=None, optimizer_kwargs=None, lr_scheduler=None, @@ -305,7 +309,9 @@ def __init__( step_size=step_size, scaler_type=scaler_type, num_workers_loader=num_workers_loader, + prefetch_factor=prefetch_factor, drop_last_loader=drop_last_loader, + pin_memory=pin_memory, random_seed=random_seed, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, diff --git a/neuralforecast/models/itransformer.py b/neuralforecast/models/itransformer.py index 5bbacd79a..a59021904 100644 --- a/neuralforecast/models/itransformer.py +++ b/neuralforecast/models/itransformer.py @@ -102,6 +102,9 @@ def forward(self, x, x_mark): return self.dropout(x) # %% ../../nbs/models.itransformer.ipynb 13 +from typing import Optional + + class iTransformer(BaseMultivariate): """iTransformer @@ -132,7 +135,9 @@ class iTransformer(BaseMultivariate): `scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
`random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
`num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
+ `prefetch_factor`: int=None, number of batches to be prefetched by the worker.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
+ `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
`alias`: str, optional, Custom name of the model.
`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
`optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
@@ -178,7 +183,9 @@ def __init__( scaler_type: str = "identity", random_seed: int = 1, num_workers_loader: int = 0, + prefetch_factor: Optional[int] = None, drop_last_loader: bool = False, + pin_memory: bool = False, optimizer=None, optimizer_kwargs=None, lr_scheduler=None, @@ -205,7 +212,9 @@ def __init__( scaler_type=scaler_type, random_seed=random_seed, num_workers_loader=num_workers_loader, + prefetch_factor=prefetch_factor, drop_last_loader=drop_last_loader, + pin_memory=pin_memory, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, lr_scheduler=lr_scheduler, diff --git a/neuralforecast/models/kan.py b/neuralforecast/models/kan.py index 6cdc162d2..767b299dd 100644 --- a/neuralforecast/models/kan.py +++ b/neuralforecast/models/kan.py @@ -282,7 +282,9 @@ class KAN(BaseWindows): `scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
`random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
`num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
+ `prefetch_factor`: int=None, number of batches to be prefetched by the worker.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
+ `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
`alias`: str, optional, Custom name of the model.
`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
`optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
@@ -332,7 +334,9 @@ def __init__( scaler_type: str = "identity", random_seed: int = 1, num_workers_loader: int = 0, + prefetch_factor: Optional[int] = None, drop_last_loader: bool = False, + pin_memory: bool = False, optimizer=None, optimizer_kwargs=None, **trainer_kwargs @@ -361,7 +365,9 @@ def __init__( step_size=step_size, scaler_type=scaler_type, num_workers_loader=num_workers_loader, + prefetch_factor=prefetch_factor, drop_last_loader=drop_last_loader, + pin_memory=pin_memory, random_seed=random_seed, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, diff --git a/neuralforecast/models/lstm.py b/neuralforecast/models/lstm.py index a37ae7e01..7c65bbf95 100644 --- a/neuralforecast/models/lstm.py +++ b/neuralforecast/models/lstm.py @@ -48,7 +48,9 @@ class LSTM(BaseRecurrent): `scaler_type`: str='robust', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
`random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
`num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
+ `prefetch_factor`: int=None, number of batches to be prefetched by the worker.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
+ `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
`alias`: str, optional, Custom name of the model.
`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
`optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
@@ -90,7 +92,9 @@ def __init__( scaler_type: str = "robust", random_seed=1, num_workers_loader=0, + prefetch_factor: Optional[int] = None, drop_last_loader=False, + pin_memory=False, optimizer=None, optimizer_kwargs=None, lr_scheduler=None, @@ -115,7 +119,9 @@ def __init__( hist_exog_list=hist_exog_list, stat_exog_list=stat_exog_list, num_workers_loader=num_workers_loader, + prefetch_factor=prefetch_factor, drop_last_loader=drop_last_loader, + pin_memory=pin_memory, random_seed=random_seed, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, diff --git a/neuralforecast/models/mlp.py b/neuralforecast/models/mlp.py index 8ded36f7a..24f9facd4 100644 --- a/neuralforecast/models/mlp.py +++ b/neuralforecast/models/mlp.py @@ -1,3 +1,5 @@ +"""One of the simplest neural architectures are Multi Layer Perceptrons (`MLP`) composed of stacked Fully Connected Neural Networks trained with backpropagation. Each node in the architecture is capable of modeling non-linear relationships granted by their activation functions. Novel activations like Rectified Linear Units (`ReLU`) have greatly improved the ability to fit deeper networks overcoming gradient vanishing problems that were associated with `Sigmoid` and `TanH` activations. For the forecasting task the last layer is changed to follow a auto-regression problem.

**References**
-[Rosenblatt, F. (1958). "The perceptron: A probabilistic model for information storage and organization in the brain."](https://psycnet.apa.org/record/1959-09865-001)
-[Fukushima, K. (1975). "Cognitron: A self-organizing multilayered neural network."](https://pascal-francis.inist.fr/vibad/index.php?action=getRecordDetail&idt=PASCAL7750396723)
-[Vinod Nair, Geoffrey E. Hinton (2010). "Rectified Linear Units Improve Restricted Boltzmann Machines"](https://www.cs.toronto.edu/~fritz/absps/reluICML.pdf)
""" + # AUTOGENERATED! DO NOT EDIT! File to edit: ../../nbs/models.mlp.ipynb. # %% auto 0 @@ -47,7 +49,9 @@ class MLP(BaseWindows): `scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
`random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
`num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
+ `prefetch_factor`: int=None, number of batches to be prefetched by the worker.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
+ `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
`alias`: str, optional, Custom name of the model.
`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
`optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
@@ -88,7 +92,9 @@ def __init__( scaler_type: str = "identity", random_seed: int = 1, num_workers_loader: int = 0, + prefetch_factor: Optional[int] = None, drop_last_loader: bool = False, + pin_memory: bool = False, optimizer=None, optimizer_kwargs=None, lr_scheduler=None, @@ -119,7 +125,9 @@ def __init__( step_size=step_size, scaler_type=scaler_type, num_workers_loader=num_workers_loader, + prefetch_factor=prefetch_factor, drop_last_loader=drop_last_loader, + pin_memory=pin_memory, random_seed=random_seed, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, diff --git a/neuralforecast/models/mlpmultivariate.py b/neuralforecast/models/mlpmultivariate.py index 4682174e0..f11efaf7b 100644 --- a/neuralforecast/models/mlpmultivariate.py +++ b/neuralforecast/models/mlpmultivariate.py @@ -1,3 +1,5 @@ +"""One of the simplest neural architectures are Multi Layer Perceptrons (`MLP`) composed of stacked Fully Connected Neural Networks trained with backpropagation. Each node in the architecture is capable of modeling non-linear relationships granted by their activation functions. Novel activations like Rectified Linear Units (`ReLU`) have greatly improved the ability to fit deeper networks overcoming gradient vanishing problems that were associated with `Sigmoid` and `TanH` activations. For the forecasting task the last layer is changed to follow a auto-regression problem. This version is multivariate, indicating that it will predict all time series of the forecasting problem jointly.

**References**
-[Rosenblatt, F. (1958). "The perceptron: A probabilistic model for information storage and organization in the brain."](https://psycnet.apa.org/record/1959-09865-001)
-[Fukushima, K. (1975). "Cognitron: A self-organizing multilayered neural network."](https://pascal-francis.inist.fr/vibad/index.php?action=getRecordDetail&idt=PASCAL7750396723)
-[Vinod Nair, Geoffrey E. Hinton (2010). "Rectified Linear Units Improve Restricted Boltzmann Machines"](https://www.cs.toronto.edu/~fritz/absps/reluICML.pdf)
""" + # AUTOGENERATED! DO NOT EDIT! File to edit: ../../nbs/models.mlpmultivariate.ipynb. # %% auto 0 @@ -11,6 +13,9 @@ from ..common._base_multivariate import BaseMultivariate # %% ../../nbs/models.mlpmultivariate.ipynb 6 +from typing import Optional + + class MLPMultivariate(BaseMultivariate): """MLPMultivariate @@ -41,7 +46,9 @@ class MLPMultivariate(BaseMultivariate): `scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
`random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
`num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
+ `prefetch_factor`: int=None, number of batches to be prefetched by the worker.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
+ `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
`alias`: str, optional, Custom name of the model.
`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
`optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
@@ -78,7 +85,9 @@ def __init__( scaler_type: str = "identity", random_seed: int = 1, num_workers_loader: int = 0, + prefetch_factor: Optional[int] = None, drop_last_loader: bool = False, + pin_memory: bool = False, optimizer=None, optimizer_kwargs=None, lr_scheduler=None, @@ -105,7 +114,9 @@ def __init__( step_size=step_size, scaler_type=scaler_type, num_workers_loader=num_workers_loader, + prefetch_factor=prefetch_factor, drop_last_loader=drop_last_loader, + pin_memory=pin_memory, random_seed=random_seed, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, diff --git a/neuralforecast/models/nbeats.py b/neuralforecast/models/nbeats.py index 5dfa5c7a2..62e1da90e 100644 --- a/neuralforecast/models/nbeats.py +++ b/neuralforecast/models/nbeats.py @@ -226,7 +226,9 @@ class NBEATS(BaseWindows): `scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
`random_seed`: int, random_seed for pytorch initializer and numpy generators.
`num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
+ `prefetch_factor`: int=None, number of batches to be prefetched by the worker.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
+ `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
`alias`: str, optional, Custom name of the model.
`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
`optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
@@ -273,7 +275,9 @@ def __init__( scaler_type: str = "identity", random_seed: int = 1, num_workers_loader: int = 0, + prefetch_factor: Optional[int] = None, drop_last_loader: bool = False, + pin_memory: bool = False, optimizer=None, optimizer_kwargs=None, lr_scheduler=None, @@ -306,7 +310,9 @@ def __init__( step_size=step_size, scaler_type=scaler_type, num_workers_loader=num_workers_loader, + prefetch_factor=prefetch_factor, drop_last_loader=drop_last_loader, + pin_memory=pin_memory, random_seed=random_seed, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, diff --git a/neuralforecast/models/nbeatsx.py b/neuralforecast/models/nbeatsx.py index 3a0e189e8..04e6447b1 100644 --- a/neuralforecast/models/nbeatsx.py +++ b/neuralforecast/models/nbeatsx.py @@ -313,7 +313,9 @@ class NBEATSx(BaseWindows): `scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
`random_seed`: int, random seed initialization for replicability.
`num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
+ `prefetch_factor`: int=None, number of batches to be prefetched by the worker.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
+ `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
`alias`: str, optional, Custom name of the model.
`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
`optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
@@ -364,7 +366,9 @@ def __init__( scaler_type: str = "identity", random_seed: int = 1, num_workers_loader: int = 0, + prefetch_factor: Optional[int] = None, drop_last_loader: bool = False, + pin_memory: bool = False, optimizer=None, optimizer_kwargs=None, lr_scheduler=None, @@ -400,7 +404,9 @@ def __init__( step_size=step_size, scaler_type=scaler_type, num_workers_loader=num_workers_loader, + prefetch_factor=prefetch_factor, drop_last_loader=drop_last_loader, + pin_memory=pin_memory, random_seed=random_seed, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, diff --git a/neuralforecast/models/nhits.py b/neuralforecast/models/nhits.py index ebe9e784d..4574d86a5 100644 --- a/neuralforecast/models/nhits.py +++ b/neuralforecast/models/nhits.py @@ -224,7 +224,9 @@ class NHITS(BaseWindows): `scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
`random_seed`: int, random_seed for pytorch initializer and numpy generators.
`num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
+ `prefetch_factor`: int=None, number of batches to be prefetched by the worker.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
+ `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
`alias`: str, optional, Custom name of the model.
`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
`optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
@@ -277,7 +279,9 @@ def __init__( scaler_type: str = "identity", random_seed: int = 1, num_workers_loader=0, + prefetch_factor: Optional[int] = None, drop_last_loader=False, + pin_memory=False, optimizer=None, optimizer_kwargs=None, lr_scheduler=None, @@ -308,7 +312,9 @@ def __init__( step_size=step_size, scaler_type=scaler_type, num_workers_loader=num_workers_loader, + prefetch_factor=prefetch_factor, drop_last_loader=drop_last_loader, + pin_memory=pin_memory, random_seed=random_seed, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, diff --git a/neuralforecast/models/nlinear.py b/neuralforecast/models/nlinear.py index 8e7962413..d453e5373 100644 --- a/neuralforecast/models/nlinear.py +++ b/neuralforecast/models/nlinear.py @@ -37,7 +37,9 @@ class NLinear(BaseWindows): `scaler_type`: str='robust', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
`random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
`num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
+ `prefetch_factor`: int=None, number of batches to be prefetched by the worker.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
+ `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
`alias`: str, optional, Custom name of the model.
`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
`optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
@@ -79,7 +81,9 @@ def __init__( scaler_type: str = "identity", random_seed: int = 1, num_workers_loader: int = 0, + prefetch_factor: Optional[int] = None, drop_last_loader: bool = False, + pin_memory: bool = False, optimizer=None, optimizer_kwargs=None, lr_scheduler=None, @@ -108,7 +112,9 @@ def __init__( step_size=step_size, scaler_type=scaler_type, num_workers_loader=num_workers_loader, + prefetch_factor=prefetch_factor, drop_last_loader=drop_last_loader, + pin_memory=pin_memory, random_seed=random_seed, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, diff --git a/neuralforecast/models/patchtst.py b/neuralforecast/models/patchtst.py index add87d623..5691c4a9c 100644 --- a/neuralforecast/models/patchtst.py +++ b/neuralforecast/models/patchtst.py @@ -834,7 +834,9 @@ class PatchTST(BaseWindows): `scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
`random_seed`: int, random_seed for pytorch initializer and numpy generators.
`num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
+ `prefetch_factor`: int=None, number of batches to be prefetched by the worker.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
+ `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
`alias`: str, optional, Custom name of the model.
`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
`optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
@@ -893,7 +895,9 @@ def __init__( scaler_type: str = "identity", random_seed: int = 1, num_workers_loader: int = 0, + prefetch_factor: Optional[int] = None, drop_last_loader: bool = False, + pin_memory: bool = False, optimizer=None, optimizer_kwargs=None, lr_scheduler=None, @@ -922,7 +926,9 @@ def __init__( step_size=step_size, scaler_type=scaler_type, num_workers_loader=num_workers_loader, + prefetch_factor=prefetch_factor, drop_last_loader=drop_last_loader, + pin_memory=pin_memory, random_seed=random_seed, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, diff --git a/neuralforecast/models/rmok.py b/neuralforecast/models/rmok.py index 7f9e5718b..14f76de00 100644 --- a/neuralforecast/models/rmok.py +++ b/neuralforecast/models/rmok.py @@ -256,6 +256,9 @@ def forward(self, x): return y # %% ../../nbs/models.rmok.ipynb 14 +from typing import Optional + + class RMoK(BaseMultivariate): """Reversible Mixture of KAN **Parameters**
@@ -282,7 +285,9 @@ class RMoK(BaseMultivariate): `scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
`random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
`num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
+ `prefetch_factor`: int=None, number of batches to be prefetched by the worker.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
+ `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
`alias`: str, optional, Custom name of the model.
`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
`optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
@@ -325,7 +330,9 @@ def __init__( scaler_type: str = "identity", random_seed: int = 1, num_workers_loader: int = 0, + prefetch_factor: Optional[int] = None, drop_last_loader: bool = False, + pin_memory: bool = False, optimizer=None, optimizer_kwargs=None, lr_scheduler=None, @@ -352,7 +359,9 @@ def __init__( scaler_type=scaler_type, random_seed=random_seed, num_workers_loader=num_workers_loader, + prefetch_factor=prefetch_factor, drop_last_loader=drop_last_loader, + pin_memory=pin_memory, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, lr_scheduler=lr_scheduler, diff --git a/neuralforecast/models/rnn.py b/neuralforecast/models/rnn.py index eb7918809..f8cf8ef98 100644 --- a/neuralforecast/models/rnn.py +++ b/neuralforecast/models/rnn.py @@ -49,7 +49,9 @@ class RNN(BaseRecurrent): `scaler_type`: str='robust', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
`random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
`num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
+ `prefetch_factor`: int=None, number of batches to be prefetched by the worker.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
+ `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
`optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
`lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
@@ -93,7 +95,9 @@ def __init__( scaler_type: str = "robust", random_seed=1, num_workers_loader=0, + prefetch_factor: Optional[int] = None, drop_last_loader=False, + pin_memory=False, optimizer=None, optimizer_kwargs=None, lr_scheduler=None, @@ -118,7 +122,9 @@ def __init__( hist_exog_list=hist_exog_list, stat_exog_list=stat_exog_list, num_workers_loader=num_workers_loader, + prefetch_factor=prefetch_factor, drop_last_loader=drop_last_loader, + pin_memory=pin_memory, random_seed=random_seed, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, diff --git a/neuralforecast/models/softs.py b/neuralforecast/models/softs.py index 120354aa2..a42b2a710 100644 --- a/neuralforecast/models/softs.py +++ b/neuralforecast/models/softs.py @@ -79,6 +79,9 @@ def forward(self, input, *args, **kwargs): return output, None # %% ../../nbs/models.softs.ipynb 10 +from typing import Optional + + class SOFTS(BaseMultivariate): """SOFTS @@ -107,7 +110,9 @@ class SOFTS(BaseMultivariate): `scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
`random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
`num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
+ `prefetch_factor`: int=None, number of batches to be prefetched by the worker.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
+ `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
`alias`: str, optional, Custom name of the model.
`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
`optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
@@ -151,7 +156,9 @@ def __init__( scaler_type: str = "identity", random_seed: int = 1, num_workers_loader: int = 0, + prefetch_factor: Optional[int] = None, drop_last_loader: bool = False, + pin_memory: bool = False, optimizer=None, optimizer_kwargs=None, lr_scheduler=None, @@ -178,7 +185,9 @@ def __init__( scaler_type=scaler_type, random_seed=random_seed, num_workers_loader=num_workers_loader, + prefetch_factor=prefetch_factor, drop_last_loader=drop_last_loader, + pin_memory=pin_memory, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, lr_scheduler=lr_scheduler, diff --git a/neuralforecast/models/stemgnn.py b/neuralforecast/models/stemgnn.py index 485af86b0..1db823c6c 100644 --- a/neuralforecast/models/stemgnn.py +++ b/neuralforecast/models/stemgnn.py @@ -136,6 +136,9 @@ def forward(self, x, mul_L): return forecast, backcast_source # %% ../../nbs/models.stemgnn.ipynb 9 +from typing import Optional + + class StemGNN(BaseMultivariate): """StemGNN @@ -167,7 +170,9 @@ class StemGNN(BaseMultivariate): `scaler_type`: str='robust', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
`random_seed`: int, random_seed for pytorch initializer and numpy generators.
`num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
+ `prefetch_factor`: int=None, number of batches to be prefetched by the worker.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
+ `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
`alias`: str, optional, Custom name of the model.
`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
`optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
@@ -206,7 +211,9 @@ def __init__( scaler_type: str = "robust", random_seed: int = 1, num_workers_loader=0, + prefetch_factor=None, drop_last_loader=False, + pin_memory=False, optimizer=None, optimizer_kwargs=None, lr_scheduler=None, @@ -233,7 +240,9 @@ def __init__( step_size=step_size, scaler_type=scaler_type, num_workers_loader=num_workers_loader, + prefetch_factor=prefetch_factor, drop_last_loader=drop_last_loader, + pin_memory=pin_memory, random_seed=random_seed, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, diff --git a/neuralforecast/models/tcn.py b/neuralforecast/models/tcn.py index 53a0d4bd9..182b68195 100644 --- a/neuralforecast/models/tcn.py +++ b/neuralforecast/models/tcn.py @@ -45,7 +45,9 @@ class TCN(BaseRecurrent): `scaler_type`: str='robust', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
`random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
`num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
+ `prefetch_factor`: int=None, number of batches to be prefetched by the worker.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
+ `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
`alias`: str, optional, Custom name of the model.
`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
`optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
@@ -87,7 +89,9 @@ def __init__( scaler_type: str = "robust", random_seed: int = 1, num_workers_loader=0, + prefetch_factor: Optional[int] = None, drop_last_loader=False, + pin_memory=False, optimizer=None, optimizer_kwargs=None, lr_scheduler=None, @@ -112,7 +116,9 @@ def __init__( hist_exog_list=hist_exog_list, stat_exog_list=stat_exog_list, num_workers_loader=num_workers_loader, + prefetch_factor=prefetch_factor, drop_last_loader=drop_last_loader, + pin_memory=pin_memory, random_seed=random_seed, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, diff --git a/neuralforecast/models/tft.py b/neuralforecast/models/tft.py index 1b972b390..d4fe3376e 100644 --- a/neuralforecast/models/tft.py +++ b/neuralforecast/models/tft.py @@ -455,7 +455,9 @@ class TFT(BaseWindows): `scaler_type`: str='robust', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
`random_seed`: int, random seed initialization for replicability.
`num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
+ `prefetch_factor`: int=None, number of batches to be prefetched by the worker.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
+ `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
`alias`: str, optional, Custom name of the model.
`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
`optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
@@ -502,7 +504,9 @@ def __init__( step_size: int = 1, scaler_type: str = "robust", num_workers_loader=0, + prefetch_factor: Optional[int] = None, drop_last_loader=False, + pin_memory=False, random_seed: int = 1, optimizer=None, optimizer_kwargs=None, @@ -533,7 +537,9 @@ def __init__( step_size=step_size, scaler_type=scaler_type, num_workers_loader=num_workers_loader, + prefetch_factor=prefetch_factor, drop_last_loader=drop_last_loader, + pin_memory=pin_memory, random_seed=random_seed, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, diff --git a/neuralforecast/models/tide.py b/neuralforecast/models/tide.py index cdff9b7e6..c01b73785 100644 --- a/neuralforecast/models/tide.py +++ b/neuralforecast/models/tide.py @@ -1,3 +1,5 @@ +"""Time-series Dense Encoder (`TiDE`) is a MLP-based univariate time-series forecasting model. `TiDE` uses Multi-layer Perceptrons (MLPs) in an encoder-decoder model for long-term time-series forecasting. In addition, this model can handle exogenous inputs.""" + # AUTOGENERATED! DO NOT EDIT! File to edit: ../../nbs/models.tide.ipynb. # %% auto 0 @@ -79,7 +81,9 @@ class TiDE(BaseWindows): `scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
`random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
`num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
+ `prefetch_factor`: int=None, number of batches to be prefetched by the worker.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
+ `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
`alias`: str, optional, Custom name of the model.
`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
`optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
@@ -130,7 +134,9 @@ def __init__( scaler_type: str = "identity", random_seed: int = 1, num_workers_loader: int = 0, + prefetch_factor: Optional[int] = None, drop_last_loader: bool = False, + pin_memory: bool = False, optimizer=None, optimizer_kwargs=None, lr_scheduler=None, @@ -162,7 +168,9 @@ def __init__( scaler_type=scaler_type, random_seed=random_seed, num_workers_loader=num_workers_loader, + prefetch_factor=prefetch_factor, drop_last_loader=drop_last_loader, + pin_memory=pin_memory, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, lr_scheduler=lr_scheduler, diff --git a/neuralforecast/models/timellm.py b/neuralforecast/models/timellm.py index 4c58a4b23..e515cb807 100644 --- a/neuralforecast/models/timellm.py +++ b/neuralforecast/models/timellm.py @@ -212,7 +212,9 @@ class TimeLLM(BaseWindows): `scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
`random_seed`: int, random_seed for pytorch initializer and numpy generators.
`num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
+ 'prefetch_factor': int=None, number of batches to be prefetched by the worker.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
+ `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
`alias`: str, optional, Custom name of the model.
`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
`optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
@@ -269,7 +271,9 @@ def __init__( early_stop_patience_steps: int = -1, scaler_type: str = "identity", num_workers_loader: int = 0, + prefetch_factor: Optional[int] = None, drop_last_loader: bool = False, + pin_memory: bool = False, random_seed: int = 1, optimizer=None, optimizer_kwargs=None, @@ -298,7 +302,9 @@ def __init__( step_size=step_size, scaler_type=scaler_type, num_workers_loader=num_workers_loader, + prefetch_factor=prefetch_factor, drop_last_loader=drop_last_loader, + pin_memory=pin_memory, random_seed=random_seed, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, diff --git a/neuralforecast/models/timemixer.py b/neuralforecast/models/timemixer.py index 602e602c7..9f632a1a8 100644 --- a/neuralforecast/models/timemixer.py +++ b/neuralforecast/models/timemixer.py @@ -249,6 +249,9 @@ def forward(self, x_list): return out_list # %% ../../nbs/models.timemixer.ipynb 12 +from typing import Optional + + class TimeMixer(BaseMultivariate): """TimeMixer **Parameters**
@@ -283,7 +286,9 @@ class TimeMixer(BaseMultivariate): `scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
`random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
`num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
+ 'prefetch_factor': int=None, number of batches to be prefetched by the worker.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
+ `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
`alias`: str, optional, Custom name of the model.
`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
`optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
@@ -334,7 +339,9 @@ def __init__( scaler_type: str = "identity", random_seed: int = 1, num_workers_loader: int = 0, + prefetch_factor: Optional[int] = None, drop_last_loader: bool = False, + pin_memory: bool = False, optimizer=None, optimizer_kwargs=None, lr_scheduler=None, @@ -361,7 +368,9 @@ def __init__( scaler_type=scaler_type, random_seed=random_seed, num_workers_loader=num_workers_loader, + prefetch_factor=prefetch_factor, drop_last_loader=drop_last_loader, + pin_memory=pin_memory, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, lr_scheduler=lr_scheduler, diff --git a/neuralforecast/models/timesnet.py b/neuralforecast/models/timesnet.py index c08719ca0..8b0ff9257 100644 --- a/neuralforecast/models/timesnet.py +++ b/neuralforecast/models/timesnet.py @@ -180,8 +180,12 @@ class TimesNet(BaseWindows): Random_seed for pytorch initializer and numpy generators. num_workers_loader : int (default=0) Workers to be used by `TimeSeriesDataLoader`. + 'prefetch_factor': int (default=None) + Number of batches to be prefetched by the worker. drop_last_loader : bool (default=False) If True `TimeSeriesDataLoader` drops last non-full batch. + `pin_memory`: bool (default=False) + If True `TimeSeriesDataLoader` uses pinned memory. `optimizer`: Subclass of 'torch.optim.Optimizer', optional (default=None) User specified optimizer instead of the default choice (Adam). 
`optimizer_kwargs`: dict, optional (defualt=None) @@ -232,7 +236,9 @@ def __init__( scaler_type: str = "standard", random_seed: int = 1, num_workers_loader: int = 0, + prefetch_factor: Optional[int] = None, drop_last_loader: bool = False, + pin_memory: bool = False, optimizer=None, optimizer_kwargs=None, lr_scheduler=None, @@ -261,7 +267,9 @@ def __init__( step_size=step_size, scaler_type=scaler_type, num_workers_loader=num_workers_loader, + prefetch_factor=prefetch_factor, drop_last_loader=drop_last_loader, + pin_memory=pin_memory, random_seed=random_seed, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, diff --git a/neuralforecast/models/tsmixer.py b/neuralforecast/models/tsmixer.py index 0cfd33128..e8877d13e 100644 --- a/neuralforecast/models/tsmixer.py +++ b/neuralforecast/models/tsmixer.py @@ -1,3 +1,5 @@ +"""Time-Series Mixer (`TSMixer`) is a MLP-based multivariate time-series forecasting model. `TSMixer` jointly learns temporal and cross-sectional representations of the time-series by repeatedly combining time- and feature information using stacked mixing layers. A mixing layer consists of a sequential time- and feature Multi Layer Perceptron (`MLP`). Note: this model cannot handle exogenous inputs. If you want to use additional exogenous inputs, use `TSMixerx`.""" + # AUTOGENERATED! DO NOT EDIT! File to edit: ../../nbs/models.tsmixer.ipynb. # %% auto 0 @@ -130,6 +132,9 @@ def reverse(self, x): return x # %% ../../nbs/models.tsmixer.ipynb 12 +from typing import Optional + + class TSMixer(BaseMultivariate): """TSMixer @@ -158,7 +163,9 @@ class TSMixer(BaseMultivariate): `scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
`random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
`num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
+ 'prefetch_factor': int=None, number of batches to be prefetched by the worker.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
+ `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
`alias`: str, optional, Custom name of the model.
`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
`optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
@@ -201,7 +208,9 @@ def __init__( scaler_type: str = "identity", random_seed: int = 1, num_workers_loader: int = 0, + prefetch_factor: Optional[int] = None, drop_last_loader: bool = False, + pin_memory: bool = False, optimizer=None, optimizer_kwargs=None, lr_scheduler=None, @@ -229,7 +238,9 @@ def __init__( scaler_type=scaler_type, random_seed=random_seed, num_workers_loader=num_workers_loader, + prefetch_factor=prefetch_factor, drop_last_loader=drop_last_loader, + pin_memory=pin_memory, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, lr_scheduler=lr_scheduler, diff --git a/neuralforecast/models/tsmixerx.py b/neuralforecast/models/tsmixerx.py index 3c38764f1..50ba93e6f 100644 --- a/neuralforecast/models/tsmixerx.py +++ b/neuralforecast/models/tsmixerx.py @@ -1,3 +1,5 @@ +"""Time-Series Mixer exogenous (`TSMixerx`) is a MLP-based multivariate time-series forecasting model, with capability for additional exogenous inputs. `TSMixerx` jointly learns temporal and cross-sectional representations of the time-series by repeatedly combining time- and feature information using stacked mixing layers. A mixing layer consists of a sequential time- and feature Multi Layer Perceptron (`MLP`).""" + # AUTOGENERATED! DO NOT EDIT! File to edit: ../../nbs/models.tsmixerx.ipynb. # %% auto 0 @@ -158,6 +160,9 @@ def reverse(self, x): return x # %% ../../nbs/models.tsmixerx.ipynb 12 +from typing import Optional + + class TSMixerx(BaseMultivariate): """TSMixerx @@ -186,7 +191,9 @@ class TSMixerx(BaseMultivariate): `scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
`random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
`num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
+ 'prefetch_factor': int=None, number of batches to be prefetched by the worker.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
+ `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
`alias`: str, optional, Custom name of the model.
`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
`optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
@@ -229,7 +236,9 @@ def __init__( scaler_type: str = "identity", random_seed: int = 1, num_workers_loader: int = 0, + prefetch_factor: Optional[int] = None, drop_last_loader: bool = False, + pin_memory: bool = False, optimizer=None, optimizer_kwargs=None, lr_scheduler=None, @@ -257,7 +266,9 @@ def __init__( scaler_type=scaler_type, random_seed=random_seed, num_workers_loader=num_workers_loader, + prefetch_factor=prefetch_factor, drop_last_loader=drop_last_loader, + pin_memory=pin_memory, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, lr_scheduler=lr_scheduler, diff --git a/neuralforecast/models/vanillatransformer.py b/neuralforecast/models/vanillatransformer.py index 5bd1f933a..640cd67ce 100644 --- a/neuralforecast/models/vanillatransformer.py +++ b/neuralforecast/models/vanillatransformer.py @@ -115,7 +115,9 @@ class VanillaTransformer(BaseWindows): `scaler_type`: str='robust', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
`random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
`num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
+ 'prefetch_factor': int=None, number of batches to be prefetched by the worker.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
+ `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
`alias`: str, optional, Custom name of the model.
`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
`optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
@@ -164,7 +166,9 @@ def __init__( scaler_type: str = "identity", random_seed: int = 1, num_workers_loader: int = 0, + prefetch_factor: Optional[int] = None, drop_last_loader: bool = False, + pin_memory: bool = False, optimizer=None, optimizer_kwargs=None, lr_scheduler=None, @@ -192,7 +196,9 @@ def __init__( step_size=step_size, scaler_type=scaler_type, num_workers_loader=num_workers_loader, + prefetch_factor=prefetch_factor, drop_last_loader=drop_last_loader, + pin_memory=pin_memory, random_seed=random_seed, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, diff --git a/neuralforecast/tsdataset.py b/neuralforecast/tsdataset.py index 153401cbf..4ef58772b 100644 --- a/neuralforecast/tsdataset.py +++ b/neuralforecast/tsdataset.py @@ -1,3 +1,5 @@ +"""Torch Dataset for Time Series""" + # AUTOGENERATED! DO NOT EDIT! File to edit: ../nbs/tsdataset.ipynb. # %% auto 0 @@ -587,16 +589,20 @@ def __init__( batch_size=32, valid_batch_size=1024, num_workers=0, + prefetch_factor=None, drop_last=False, shuffle_train=True, + pin_memory=False, ): super().__init__() self.dataset = dataset self.batch_size = batch_size self.valid_batch_size = valid_batch_size self.num_workers = num_workers + self.prefetch_factor = prefetch_factor self.drop_last = drop_last self.shuffle_train = shuffle_train + self.pin_memory = pin_memory def train_dataloader(self): loader = TimeSeriesLoader( @@ -605,6 +611,8 @@ def train_dataloader(self): num_workers=self.num_workers, shuffle=self.shuffle_train, drop_last=self.drop_last, + pin_memory=self.pin_memory, + prefetch_factor=self.prefetch_factor, ) return loader @@ -615,6 +623,8 @@ def val_dataloader(self): num_workers=self.num_workers, shuffle=False, drop_last=self.drop_last, + pin_memory=self.pin_memory, + prefetch_factor=self.prefetch_factor, ) return loader @@ -624,6 +634,8 @@ def predict_dataloader(self): batch_size=self.valid_batch_size, num_workers=self.num_workers, shuffle=False, + pin_memory=self.pin_memory, + 
prefetch_factor=self.prefetch_factor, ) return loader diff --git a/neuralforecast/utils.py b/neuralforecast/utils.py index 4a272dfcb..2e076b158 100644 --- a/neuralforecast/utils.py +++ b/neuralforecast/utils.py @@ -1,3 +1,5 @@ +"""The `core.NeuralForecast` class allows you to efficiently fit multiple `NeuralForecast` models for large sets of time series. It operates with pandas DataFrame `df` that identifies individual series and datestamps with the `unique_id` and `ds` columns, and the `y` column denotes the target time series variable. To assist development, we declare useful datasets that we use throughout all `NeuralForecast`'s unit tests.

""" + # AUTOGENERATED! DO NOT EDIT! File to edit: ../nbs/utils.ipynb. # %% auto 0 From c512b222448f8760781ab4c25fa029d364fb13e2 Mon Sep 17 00:00:00 2001 From: jasminerienecker Date: Tue, 22 Oct 2024 09:48:11 +1100 Subject: [PATCH 02/11] remove unrelated changes added during export --- nbs/models.stemgnn.ipynb | 28 +- nbs/models.tft.ipynb | 877 +------------------- neuralforecast/auto.py | 2 - neuralforecast/common/_base_auto.py | 2 - neuralforecast/common/_base_multivariate.py | 2 - neuralforecast/common/_base_windows.py | 2 - neuralforecast/common/_scalers.py | 2 - neuralforecast/core.py | 2 - neuralforecast/losses/numpy.py | 2 - neuralforecast/losses/pytorch.py | 2 - neuralforecast/models/mlp.py | 2 - neuralforecast/models/mlpmultivariate.py | 2 - neuralforecast/models/tide.py | 2 - neuralforecast/models/tsmixer.py | 2 - neuralforecast/models/tsmixerx.py | 2 - neuralforecast/utils.py | 2 - 16 files changed, 21 insertions(+), 912 deletions(-) diff --git a/nbs/models.stemgnn.ipynb b/nbs/models.stemgnn.ipynb index acf1f4592..3e2bf0ec5 100644 --- a/nbs/models.stemgnn.ipynb +++ b/nbs/models.stemgnn.ipynb @@ -61,19 +61,7 @@ "cell_type": "code", "execution_count": null, "metadata": {}, - "outputs": [ - { - "ename": "ModuleNotFoundError", - "evalue": "No module named 'neuralforecast'", - "output_type": "error", - "traceback": [ - "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[0;31mModuleNotFoundError\u001b[0m Traceback (most recent call last)", - "Cell \u001b[0;32mIn[4], line 6\u001b[0m\n\u001b[1;32m 3\u001b[0m \u001b[38;5;28;01mimport\u001b[39;00m \u001b[38;5;21;01mtorch\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mnn\u001b[39;00m \u001b[38;5;28;01mas\u001b[39;00m \u001b[38;5;21;01mnn\u001b[39;00m\n\u001b[1;32m 4\u001b[0m \u001b[38;5;28;01mimport\u001b[39;00m 
\u001b[38;5;21;01mtorch\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mnn\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mfunctional\u001b[39;00m \u001b[38;5;28;01mas\u001b[39;00m \u001b[38;5;21;01mF\u001b[39;00m\n\u001b[0;32m----> 6\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mneuralforecast\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mlosses\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mpytorch\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m MAE\n\u001b[1;32m 7\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mneuralforecast\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mcommon\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01m_base_multivariate\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m BaseMultivariate\n", - "\u001b[0;31mModuleNotFoundError\u001b[0m: No module named 'neuralforecast'" - ] - } - ], + "outputs": [], "source": [ "#| export\n", "import torch\n", @@ -453,19 +441,7 @@ "cell_type": "code", "execution_count": null, "metadata": {}, - "outputs": [ - { - "ename": "NameError", - "evalue": "name 'logging' is not defined", - "output_type": "error", - "traceback": [ - "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[0;31mNameError\u001b[0m Traceback (most recent call last)", - "Cell \u001b[0;32mIn[1], line 3\u001b[0m\n\u001b[1;32m 1\u001b[0m \u001b[38;5;66;03m#| hide\u001b[39;00m\n\u001b[1;32m 2\u001b[0m \u001b[38;5;66;03m# Test losses\u001b[39;00m\n\u001b[0;32m----> 3\u001b[0m \u001b[43mlogging\u001b[49m\u001b[38;5;241m.\u001b[39mgetLogger(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mpytorch_lightning\u001b[39m\u001b[38;5;124m\"\u001b[39m)\u001b[38;5;241m.\u001b[39msetLevel(logging\u001b[38;5;241m.\u001b[39mERROR)\n\u001b[1;32m 4\u001b[0m 
warnings\u001b[38;5;241m.\u001b[39mfilterwarnings(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mignore\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n\u001b[1;32m 6\u001b[0m Y_train_df \u001b[38;5;241m=\u001b[39m AirPassengersPanel[AirPassengersPanel\u001b[38;5;241m.\u001b[39mds\u001b[38;5;241m<\u001b[39mAirPassengersPanel[\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mds\u001b[39m\u001b[38;5;124m'\u001b[39m]\u001b[38;5;241m.\u001b[39mvalues[\u001b[38;5;241m-\u001b[39m\u001b[38;5;241m12\u001b[39m]]\u001b[38;5;241m.\u001b[39mreset_index(drop\u001b[38;5;241m=\u001b[39m\u001b[38;5;28;01mTrue\u001b[39;00m) \u001b[38;5;66;03m# 132 train\u001b[39;00m\n", - "\u001b[0;31mNameError\u001b[0m: name 'logging' is not defined" - ] - } - ], + "outputs": [], "source": [ "#| hide\n", "# Test losses\n", diff --git a/nbs/models.tft.ipynb b/nbs/models.tft.ipynb index e3123f74f..cd9591810 100644 --- a/nbs/models.tft.ipynb +++ b/nbs/models.tft.ipynb @@ -4,15 +4,7 @@ "cell_type": "code", "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "env: PYTORCH_ENABLE_MPS_FALLBACK=1\n" - ] - } - ], + "outputs": [], "source": [ "%set_env PYTORCH_ENABLE_MPS_FALLBACK=1" ] @@ -1024,73 +1016,7 @@ "cell_type": "code", "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/markdown": [ - "---\n", - "\n", - "### TFT.fit\n", - "\n", - "> TFT.fit (dataset, val_size=0, test_size=0, random_seed=None,\n", - "> distributed_config=None)\n", - "\n", - "Fit.\n", - "\n", - "The `fit` method, optimizes the neural network's weights using the\n", - "initialization parameters (`learning_rate`, `windows_batch_size`, ...)\n", - "and the `loss` function as defined during the initialization.\n", - "Within `fit` we use a PyTorch Lightning `Trainer` that\n", - "inherits the initialization's `self.trainer_kwargs`, to customize\n", - "its inputs, see [PL's trainer 
arguments](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).\n", - "\n", - "The method is designed to be compatible with SKLearn-like classes\n", - "and in particular to be compatible with the StatsForecast library.\n", - "\n", - "By default the `model` is not saving training checkpoints to protect\n", - "disk memory, to get them change `enable_checkpointing=True` in `__init__`.\n", - "\n", - "**Parameters:**
\n", - "`dataset`: NeuralForecast's `TimeSeriesDataset`, see [documentation](https://nixtla.github.io/neuralforecast/tsdataset.html).
\n", - "`val_size`: int, validation size for temporal cross-validation.
\n", - "`random_seed`: int=None, random_seed for pytorch initializer and numpy generators, overwrites model.__init__'s.
\n", - "`test_size`: int, test size for temporal cross-validation.
" - ], - "text/plain": [ - "---\n", - "\n", - "### TFT.fit\n", - "\n", - "> TFT.fit (dataset, val_size=0, test_size=0, random_seed=None,\n", - "> distributed_config=None)\n", - "\n", - "Fit.\n", - "\n", - "The `fit` method, optimizes the neural network's weights using the\n", - "initialization parameters (`learning_rate`, `windows_batch_size`, ...)\n", - "and the `loss` function as defined during the initialization.\n", - "Within `fit` we use a PyTorch Lightning `Trainer` that\n", - "inherits the initialization's `self.trainer_kwargs`, to customize\n", - "its inputs, see [PL's trainer arguments](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).\n", - "\n", - "The method is designed to be compatible with SKLearn-like classes\n", - "and in particular to be compatible with the StatsForecast library.\n", - "\n", - "By default the `model` is not saving training checkpoints to protect\n", - "disk memory, to get them change `enable_checkpointing=True` in `__init__`.\n", - "\n", - "**Parameters:**
\n", - "`dataset`: NeuralForecast's `TimeSeriesDataset`, see [documentation](https://nixtla.github.io/neuralforecast/tsdataset.html).
\n", - "`val_size`: int, validation size for temporal cross-validation.
\n", - "`random_seed`: int=None, random_seed for pytorch initializer and numpy generators, overwrites model.__init__'s.
\n", - "`test_size`: int, test size for temporal cross-validation.
" - ] - }, - "execution_count": null, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "show_doc(TFT.fit, name='TFT.fit', title_level=3)" ] @@ -1099,53 +1025,7 @@ "cell_type": "code", "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/markdown": [ - "---\n", - "\n", - "### TFT.predict\n", - "\n", - "> TFT.predict (dataset, test_size=None, step_size=1, random_seed=None,\n", - "> **data_module_kwargs)\n", - "\n", - "Predict.\n", - "\n", - "Neural network prediction with PL's `Trainer` execution of `predict_step`.\n", - "\n", - "**Parameters:**
\n", - "`dataset`: NeuralForecast's `TimeSeriesDataset`, see [documentation](https://nixtla.github.io/neuralforecast/tsdataset.html).
\n", - "`test_size`: int=None, test size for temporal cross-validation.
\n", - "`step_size`: int=1, Step size between each window.
\n", - "`random_seed`: int=None, random_seed for pytorch initializer and numpy generators, overwrites model.__init__'s.
\n", - "`**data_module_kwargs`: PL's TimeSeriesDataModule args, see [documentation](https://pytorch-lightning.readthedocs.io/en/1.6.1/extensions/datamodules.html#using-a-datamodule)." - ], - "text/plain": [ - "---\n", - "\n", - "### TFT.predict\n", - "\n", - "> TFT.predict (dataset, test_size=None, step_size=1, random_seed=None,\n", - "> **data_module_kwargs)\n", - "\n", - "Predict.\n", - "\n", - "Neural network prediction with PL's `Trainer` execution of `predict_step`.\n", - "\n", - "**Parameters:**
\n", - "`dataset`: NeuralForecast's `TimeSeriesDataset`, see [documentation](https://nixtla.github.io/neuralforecast/tsdataset.html).
\n", - "`test_size`: int=None, test size for temporal cross-validation.
\n", - "`step_size`: int=1, Step size between each window.
\n", - "`random_seed`: int=None, random_seed for pytorch initializer and numpy generators, overwrites model.__init__'s.
\n", - "`**data_module_kwargs`: PL's TimeSeriesDataModule args, see [documentation](https://pytorch-lightning.readthedocs.io/en/1.6.1/extensions/datamodules.html#using-a-datamodule)." - ] - }, - "execution_count": null, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "show_doc(TFT.predict, name='TFT.predict', title_level=3)" ] @@ -1154,47 +1034,7 @@ "cell_type": "code", "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/markdown": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/models/tft.py#L678){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### TFT.feature_importances,\n", - "\n", - "> TFT.feature_importances, ()\n", - "\n", - "Compute the feature importances for historical, future, and static features.\n", - "\n", - "Returns:\n", - " dict: A dictionary containing the feature importances for each feature type.\n", - " The keys are 'hist_vsn', 'future_vsn', and 'static_vsn', and the values\n", - " are pandas DataFrames with the corresponding feature importances." - ], - "text/plain": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/models/tft.py#L678){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### TFT.feature_importances,\n", - "\n", - "> TFT.feature_importances, ()\n", - "\n", - "Compute the feature importances for historical, future, and static features.\n", - "\n", - "Returns:\n", - " dict: A dictionary containing the feature importances for each feature type.\n", - " The keys are 'hist_vsn', 'future_vsn', and 'static_vsn', and the values\n", - " are pandas DataFrames with the corresponding feature importances." 
- ] - }, - "execution_count": null, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "show_doc(TFT.feature_importances, name='TFT.feature_importances,', title_level=3)" ] @@ -1203,43 +1043,7 @@ "cell_type": "code", "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/markdown": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/models/tft.py#L736){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### TFT.attention_weights\n", - "\n", - "> TFT.attention_weights ()\n", - "\n", - "Batch average attention weights\n", - "\n", - "Returns:\n", - "np.ndarray: A 1D array containing the attention weights for each time step." - ], - "text/plain": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/models/tft.py#L736){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### TFT.attention_weights\n", - "\n", - "> TFT.attention_weights ()\n", - "\n", - "Batch average attention weights\n", - "\n", - "Returns:\n", - "np.ndarray: A 1D array containing the attention weights for each time step." - ] - }, - "execution_count": null, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "show_doc(TFT.attention_weights , name='TFT.attention_weights', title_level=3)" ] @@ -1248,43 +1052,7 @@ "cell_type": "code", "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/markdown": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/models/tft.py#L736){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### TFT.attention_weights\n", - "\n", - "> TFT.attention_weights ()\n", - "\n", - "Batch average attention weights\n", - "\n", - "Returns:\n", - "np.ndarray: A 1D array containing the attention weights for each time step." 
- ], - "text/plain": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/models/tft.py#L736){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### TFT.attention_weights\n", - "\n", - "> TFT.attention_weights ()\n", - "\n", - "Batch average attention weights\n", - "\n", - "Returns:\n", - "np.ndarray: A 1D array containing the attention weights for each time step." - ] - }, - "execution_count": null, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "show_doc(TFT.attention_weights , name='TFT.attention_weights', title_level=3)" ] @@ -1293,43 +1061,7 @@ "cell_type": "code", "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/markdown": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/models/tft.py#L754){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### TFT.feature_importance_correlations\n", - "\n", - "> TFT.feature_importance_correlations ()\n", - "\n", - "Compute the correlation between the past and future feature importances and the mean attention weights.\n", - "\n", - "Returns:\n", - "pd.DataFrame: A DataFrame containing the correlation coefficients between the past feature importances and the mean attention weights." - ], - "text/plain": [ - "---\n", - "\n", - "[source](https://github.com/Nixtla/neuralforecast/blob/main/neuralforecast/models/tft.py#L754){target=\"_blank\" style=\"float:right; font-size:smaller\"}\n", - "\n", - "### TFT.feature_importance_correlations\n", - "\n", - "> TFT.feature_importance_correlations ()\n", - "\n", - "Compute the correlation between the past and future feature importances and the mean attention weights.\n", - "\n", - "Returns:\n", - "pd.DataFrame: A DataFrame containing the correlation coefficients between the past feature importances and the mean attention weights." 
- ] - }, - "execution_count": null, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "show_doc(TFT.feature_importance_correlations , name='TFT.feature_importance_correlations', title_level=3)" ] @@ -1346,344 +1078,7 @@ "cell_type": "code", "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "Seed set to 1\n", - "GPU available: True (mps), used: True\n", - "TPU available: False, using: 0 TPU cores\n", - "IPU available: False, using: 0 IPUs\n", - "HPU available: False, using: 0 HPUs\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "fb09f042057e45d181a21ef46b8d933d", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "Sanity Checking: | | 0/? [00:00]. Skipping setting a default `ModelSummary` callback.\n", - "GPU available: True (mps), used: True\n", - "TPU available: False, using: 0 TPU cores\n", - "IPU available: False, using: 0 IPUs\n", - "HPU available: False, using: 0 HPUs\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "23e3f9ec08ab4b4697bf2479a6bd0bc4", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "Predicting: | | 0/? 
[00:00" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], + "outputs": [], "source": [ "#| eval: false\n", "import pandas as pd\n", @@ -1856,18 +1251,7 @@ "cell_type": "code", "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAxYAAAGGCAYAAADmRxfNAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8fJSN1AAAACXBIWXMAAA9hAAAPYQGoP6dpAAB3MklEQVR4nO3dd3hT9f4H8Hd2mk7oLpRSNmXIlCUioqCo4EBxMQRULnBR8DpwguOKXn+KgjgR1KuAXtyigjJEGQK2yJZdWlo6oE06krTJ+f2RntOmTdukOWna9P16njy0Jyen3xJo88n3MxSCIAggIiIiIiLygtLfCyAiIiIiouaPgQUREREREXmNgQUREREREXmNgQUREREREXmNgQUREREREXmNgQUREREREXmNgQUREREREXmNgQUREREREXmNgQUREREREXmNgQURURO3atUqKBQKKBQKbNmypcb9giCgU6dOUCgUuOKKKxp9fQ3Vr18/KBQKvPLKKy7v//TTT7FkyZIax0tKSrBw4UKXfxe+UNs6AEChUGDhwoWNsg4ioqaOgQURUTMRGhqKFStW1Di+detWnDhxAqGhoX5YVcOkpaUhNTUVAFx+T0DdgcWiRYuaRGCxY8cOzJgxo1HWQUTU1DGwICJqJiZOnIh169bBaDQ6HV+xYgWGDBmCdu3a+Wllnnv//fcBANdddx2OHDmC7du3+3lFDTN48GC0bdvW38sgImoSGFgQETUTd9xxBwBg9erV0rHCwkKsW7cO06ZNc/kYq9WK559/Ht26dYNOp0N0dDTuuece5ObmOp23du1ajB49GvHx8QgKCkL37t3x2GOPobi42Om8qVOnIiQkBMePH8fYsWMREhKCxMREPPTQQ7BYLG59H2azGZ9++in69++P1157DQDwwQcfOJ1zxRVX4Pvvv8eZM2ekNDCFQoHTp08jOjoaALBo0SLp+NSpU6XHHjt2DHfeeSdiYmKg0+nQvXt3vPnmm07X37JlCxQKBVavXo0nnngCCQkJCAsLw1VXXYWjR4/Wuw6Rq1SoAwcOYPz48WjVqhX0ej369OmDDz/8sEFfn4ioOWFgQUTUTISFhWHChAlOL8JXr14NpVKJiRMn1jjfbrdj/PjxWLx4Me688058//33WLx4MTZu3IgrrrgCpaWl0rnHjh3D2LFjsWLFCvz444948MEH8dlnn+GGG26ocd2ysjKMGzcOo0aNwtdff41p06bhtddew0svveTW9/HFF1/g4sWLmDZtGjp37ozLLrsMa9euRVFRkXTO8uXLMWzYMMTFxWHHjh3SLT4+Hj/++CMAYPr06dLxp556CgBw6NAhDBw4EAcOHMD//d//4bvvvsN1112HuXPnYtGiRTXW8vjjj+PMmTN4//338e677+LYsWO44YYbYLPZ6lxHbY4ePYqhQ4fi4MGDeOONN/DFF18gJSUFU6dOxcsvv+zx1ycialYEIiJq0lauXCkAEHbv3i1s3rxZACAcOHBAEARBGDhwoDB16lRBEAShR48ewogRI6THrV69WgAgrFu3zul6u3fvFgAIy5cvd/n17Ha7UFZWJmzdulUAIOzbt0+6b8qUKQIA4bPPPnN6zNixY4WuXbu69f1ceeWVgl6vFy5evOj0/a1YscLpvOuuu05ISkqq8fjc3FwBgPDMM8/UuG/MmDFC27ZthcLCQqfjc+bMEfR6vXDhwgVBEATp73Hs2L
FO53322WcCAGHHjh31rkMQhBrruP322wWdTiekp6c7nXfttdcKBoNBKCgo8PjrExE1F9yxICJqRkaMGIGOHTvigw8+wP79+7F79+5a06C+++47RERE4IYbbkB5ebl069OnD+Li4pyKn0+ePIk777wTcXFxUKlU0Gg0GDFiBADg8OHDTtdVKBQ1djJ69+6NM2fO1Lv+U6dOYfPmzbj55psREREBALj11lsRGhpaIx3KU2azGb/88gtuuukmGAwGp+957NixMJvN2Llzp9Njxo0bV+P7AODW9+LKpk2bMGrUKCQmJjodnzp1KkpKSmrsdsj99YmI/Ent7wUQEZH7FAoF7rnnHrzxxhswm83o0qULhg8f7vLc8+fPo6CgAFqt1uX9eXl5AICioiIMHz4cer0ezz//PLp06QKDwYCzZ8/i5ptvdkqZAgCDwQC9Xu90TKfTwWw217v+Dz74AIIgYMKECSgoKJCOjxs3Dp988gmOHDmCbt261XsdV/Lz81FeXo6lS5di6dKlLs8Rv2dRZGSk0+c6nQ4AanzPnqwhPj6+xvGEhATpfl9+fSIif2JgQUTUzEydOhVPP/003n77bbzwwgu1nhcVFYXIyEipJqE6sT3tpk2bcO7cOWzZskXapQDg9MJfDna7HatWrQIA3HzzzS7P+eCDD1zWIrijVatWUKlUmDRpEmbPnu3ynOTk5AZd212RkZHIysqqcfzcuXMAHM8JEVGgYmBBRNTMtGnTBg8//DCOHDmCKVOm1Hre9ddfjzVr1sBms2HQoEG1nid2ORLfLRe988478iy4wk8//YSMjAzMnj0bEyZMqHH/nDlz8NFHH+Hf//431Go1dDqdy3fua3tX32AwYOTIkUhNTUXv3r1r3anxVG3rcGXUqFH48ssvce7cOWmXAgA++ugjGAwGDB48WJY1ERE1RQwsiIiaocWLF9d7zu23345PPvkEY8eOxQMPPIBLL70UGo0GGRkZ2Lx5M8aPH4+bbroJQ4cORatWrTBz5kw888wz0Gg0+OSTT7Bv3z5Z17xixQqo1Wo8/vjjTi+6Rffffz/mzp2L77//HuPHj0evXr3wxRdf4K233kL//v2hVCoxYMAAhIaGIikpCV9//TVGjRqF1q1bIyoqCu3bt8frr7+Oyy67DMOHD8c//vEPtG/fHiaTCcePH8e3336LTZs2ebzu2tbhyjPPPIPvvvsOI0eOxNNPP43WrVvjk08+wffff4+XX34Z4eHhHn99IqLmgsXbREQBSqVS4ZtvvsHjjz+OL774AjfddBNuvPFGLF68GHq9Hr169QLgSN/5/vvvYTAYcPfdd2PatGkICQnB2rVrZVtLXl4evv32W1x//fUugwoAmDRpEoKCgqRJ3A888AAmTJiAxx9/HIMHD8bAgQOlc1esWAGDwYBx48Zh4MCB0iyJlJQU/Pnnn+jZsyeefPJJjB49GtOnT8f//vc/jBo1qkFrr2sd1XXt2hXbt29H165dMXv2bNx44404cOAAVq5ciYcffrhBX5+IqLlQCIIg+HsRRERERETUvHHHgoiIiIiIvMbAgoiIiIiIvMbAgoiIiIiIvMbAgoiIiIiIvMbAgoiIiIiIvMbAgoiIiIiIvMYBeS7Y7XacO3cOoaGh0kRaIiIiIqKWRhAEmEwmJCQkQKmse0+CgYUL586dQ2Jior+XQURERETUJJw9exZt27at8xwGFi6EhoYCcPwFhoWF+Xk1RERERET+YTQakZiYKL0+rgsDCxfE9KewsDAGFkRERETU4rlTHsDibSIiIiIi8hp3LIiIiIiameLiYvznP/9xOvbwww8jODjYTysiAhSCIAj+XkRTYzQaER4ejsLCQqZCERERUZOTm5uLmJgYp2M5OTmIjo7204ooUHnyupg7Fl6w2WwoKyvz9zIogGk0GqhUKn8vg4iIiKheDCwaQBAEZGdno6CgwN9LoRYgIiICcXFxnKlCRERETRoDiwYQg4qYmBgYDAa+4COfEA
QBJSUlyMnJAQDEx8f7eUVEREREtWNg4SGbzSYFFZGRkf5eDgW4oKAgAI682ZiYGKZFERERUZPFdrMeEmsqDAaDn1dCLYX4b431PERERNSUMbBoIKY/UWPhvzUiIiJqDhhYEBERERGR1xhYkM+0b98eS5YskT5XKBT46quvvLqmHNcgIt8rspRj9qd/4of9Wf5eChERNRIGFtRosrKycO2117p17sKFC9GnTx+vruELq1atQkREhGzXq+37JGruth/Pw/d/ZeGdX0/6eylERNRI2BWK6mS1WqHVamW5VlxcXJO4RlMgCAJsNpu/l0HkM8XWcseflnI/r4SIiBoLdyxklJub2+BbaWlprdfNy8tz+RhPXXHFFZgzZw7mzJmDiIgIREZG4sknn4QgCNI57du3x/PPP4+pU6ciPDwc9957LwBg+/btuPzyyxEUFITExETMnTsXxcXF0uNycnJwww03ICgoCMnJyfjkk09qfP3qaUwZGRm4/fbb0bp1awQHB2PAgAHYtWsXVq1ahUWLFmHfvn1QKBRQKBRYtWqVy2vs378fV155JYKCghAZGYn77rsPRUVF0v1Tp07FjTfeiFdeeQXx8fGIjIzE7Nmz6+ywtG/fPowcORKhoaEICwtD//79sWfPHmzZsgX33HMPCgsLpXUtXLgQAPDf//4XAwYMQGhoKOLi4nDnnXdK8ycAYMuWLVAoFPjpp58wYMAA6HQ6fPzxx7V+n0TNXanVDgAosTKAJiJqKbhjIaOYmJgGP3bZsmWYPXu2y/u6d++OvLy8GserBgTu+vDDDzF9+nTs2rULe/bswX333YekpCQpgACA//znP3jqqafw5JNPAnC8eB8zZgyee+45rFixArm5uVKAsnLlSgCOF/Bnz57Fpk2boNVqMXfuXKcX1tUVFRVhxIgRaNOmDb755hvExcXhzz//hN1ux8SJE3HgwAH8+OOP+PnnnwEA4eHhNa5RUlKCa665BoMHD8bu3buRk5ODGTNmYM6cOU4v0Ddv3oz4+Hhs3rwZx48fx8SJE9GnTx+n77mqu+66C3379sVbb70FlUqFtLQ0aDQaDB06FEuWLMHTTz+No0ePAgBCQkIAOHZ2nnvuOXTt2hU5OTmYN28epk6divXr1ztd+5FHHsErr7yCDh06QK/X46GHHqr3+yRqjkoqdixKyxhYEBG1GIKfvfnmm0L79u0FnU4n9OvXT/j1119rPXfdunXCVVddJURFRQmhoaHC4MGDhR9//LHGef/73/+E7t27C1qtVujevbvwxRdfeLSmwsJCAYBQWFhY477S0lLh0KFDQmlpaY37ADT4tmzZslrXExUV5fIxnhoxYoTQvXt3wW63S8ceffRRoXv37tLnSUlJwo033uj0uEmTJgn33Xef07Ft27YJSqVSKC0tFY4ePSoAEHbu3Cndf/jwYQGA8Nprrzn9/Xz55ZeCIAjCO++8I4SGhgr5+fku1/rMM88Il1xySY3jVa/x7rvvCq1atRKKioqk+7///ntBqVQK2dnZgiAIwpQpU4SkpCShvLxcOufWW28VJk6c6PLrCoIghIaGCqtWrXJ538qVK4Xw8PBaHyv6448/BACCyWQSBEEQNm/eLAAQvvrqK7e+z6rq+jdH1FQt/eVvIenR74QuT6z391KIAlJOTk6N1wU5OTn+XhYFoLpeF1fn11SotWvX4sEHH8QTTzyB1NRUDB8+HNdeey3S09Ndnv/rr7/i6quvxvr167F3716MHDkSN9xwA1JTU6VzduzYgYkTJ2LSpEnYt28fJk2ahNtuuw27du1qrG+rSRs8eLDTXIQhQ4bg2LFjTvn+AwYMcHrM3r17sWrVKoSEhEi3MWPGwG6349SpUzh8+DDUarXT47p161ZnkXNaWhr69u2L1q1bN/h7OXz4MC655BIEBwdLx4YNGwa73S7tKABAjx49nCZWx8fH17mbMn/+fMyYMQNXXXUVFi9ejBMnTtS7ltTUVIwfPx5JSUkIDQ
3FFVdcAQA1/i1X/7slClRiCpSl3A6b3fPdVSKqm0KhQFRUlNONc4/I3/waWLz66quYPn06ZsyYge7du2PJkiVITEzEW2+95fL8JUuW4JFHHsHAgQPRuXNn/Pvf/0bnzp3x7bffOp1z9dVXY8GCBejWrRsWLFiAUaNGObU9pbpVfaEOAHa7Hffffz/S0tKk2759+3Ds2DF07NhRSsny5AdaUFCQ1+sUBKHWr1n1uEajqXGf3W6v9boLFy7EwYMHcd1112HTpk1ISUnBl19+Wev5xcXFGD16NEJCQvDf//4Xu3fvls63Wq1O51b/uyUKVFVToJgORSS/qKioGrWXUVFR/l4WtXB+q7GwWq3Yu3cvHnvsMafjo0ePxvbt2926ht1uh8lkcnrXe8eOHZg3b57TeWPGjKkzsLBYLLBYLNLnRqPRra9fXV3vgtdHzNV35fDhww2qp3Bl586dNT7v3Lmz0zv61fXr1w8HDx5Ep06dXN7fvXt3lJeXY8+ePbj00ksBAEePHkVBQUGt1+zduzfef/99XLhwweWuhVarrbdrUkpKCj788EMUFxdLL9h///13KJVKdOnSpc7H1qdLly7o0qUL5s2bhzvuuAMrV67ETTfd5HJdR44cQV5eHhYvXozExEQAwJ49e9z6Ou58n0TNUWmVou0SazlCdCzpIyIKdH7bscjLy4PNZkNsbKzT8djYWGRnZ7t1jf/7v/9DcXExbrvtNulYdna2x9d88cUXER4eLt3EF4eeio6ObvCtrnfwo6KiXD6mIc6ePYv58+fj6NGjWL16NZYuXYoHHnigzsc8+uij2LFjB2bPno20tDQcO3YM33zzDf75z38CALp27YprrrkG9957L3bt2oW9e/dixowZdX5Pd9xxB+Li4nDjjTfi999/x8mTJ7Fu3Trs2LEDgKM71alTp5CWloa8vDynwE901113Qa/XY8qUKThw4AA2b96Mf/7zn5g0aVKNfwPuKi0txZw5c7BlyxacOXMGv//+O3bv3o3u3btL6yoqKsIvv/yCvLw8lJSUoF27dtBqtVi6dClOnjyJb775Bs8995xbX8+d75OoOXLasWBnKCKiFsHv7Warp7LUld5S1erVq7Fw4UKsXbu2RjcmT6+5YMECFBYWSrezZ8968B00L5MnT0ZpaSkuvfRSzJ49G//85z9x33331fmY3r17Y+vWrTh27BiGDx+Ovn374qmnnkJ8fLx0zsqVK5GYmIgRI0bg5ptvxn333VdnlyytVosNGzYgJiYGY8eORa9evbB48WJp5+SWW27BNddcg5EjRyI6OhqrV6+ucQ2DwYCffvoJFy5cwMCBAzFhwgSMGjUKy5Yta+DfDqBSqZCfn4/JkyejS5cuuO2223Dttddi0aJFAIChQ4di5syZmDhxIqKjo/Hyyy8jOjoaq1atwueff46UlBQsXrwYr7zyiltfz53vk6g5ct6xYGBBRNQSKAS5cmw8ZLVaYTAY8Pnnn+Omm26Sjj/wwANIS0vD1q1ba33s2rVrcc899+Dzzz/Hdddd53Rfu3btMG/ePKd0qNdeew1LlizBmTNn3Fqb0WhEeHg4CgsLERYW5nSf2WzGqVOnkJycDL1e79b1moorrrgCffr0Yb1JM9Oc/81RyzVpxS5sO+Zok73uH0PRP6mVn1dEREQNUdfr4ur8tmOh1WrRv39/bNy40en4xo0bMXTo0Foft3r1akydOhWffvppjaACcHQ5qn7NDRs21HlNIiKSV9UdC6ZCERG1DH6tpps/fz4mTZqEAQMGYMiQIXj33XeRnp6OmTNnAnCkKGVmZuKjjz4C4AgqJk+ejNdffx2DBw+W6iaCgoKkwWIPPPAALr/8crz00ksYP348vv76a/z888/47bff/PNNEhG1QFXTn4orhuURkXxKS0vxwQcfOB2bNm2aLF0XiRrKr4HFxIkTkZ+fj2effRZZWVno2bMn1q9fj6SkJABAVlaW0xyAd955B+Xl5Zg9e7bTlO
opU6ZIk5aHDh2KNWvW4Mknn8RTTz2Fjh07Yu3atRg0aFCjfm9N0ZYtW/y9BCJqIcws3ibyqaKiIsyZM8fp2G233cbAgvzK7/3/Zs2ahVmzZrm8TwwWRO6+MJ4wYQImTJjg5cqIiKihqnaFYvE2EVHL4PeuUEREFHhKqs2xICKiwMfAooHqmtxMJCf+W6PmiHMsiIhaHr+nQjU3Wq0WSqUS586dQ3R0NLRarVtzN4g8JQgCrFYrcnNzoVQqodVq/b0kIrfY7AKs5ZUBcUkZAwsiopaAgYWHlEolkpOTkZWVhXPnzvl7OdQCGAwGtGvXDkolNxipeSitFkhwx4KIqGVgYNEAWq0W7dq1Q3l5OWw2/sIk31GpVFCr1dwVo2aleiDBGgsiopaBgUUDKRQKaDQaaDQafy+FiKhJqR5YFHPHgoioRWBuBRERyYqpUERELRMDCyIiklX11CemQhERtQwMLIiISFbcsSAiapkYWBARkaxqFm8zsCAiagkYWBARkazEHQud2vErhoEFEVHLwMCCiIhkJe5YRIXoHJ9zQB4RUYvAwIKIiGQlBhKtgx3T4lm8TUTUMnCOBRERyUrcsYgMcQQW5jI7bHYBKiUHPRLJJTo6GoIg+HsZRE64Y0FERLISayoig3XSMaZDEREFPgYWREQkK3NFENHKoIGiYpOC6VBERIGPgQUREclK3J0waFUI0qgcx9gZiogo4DGwICIiWYmpUEFaNQxaldMxIiIKXAwsiIhIVuKORZBGiSAGFkRELQa7QhERkazEtCeDVg2DRu10jIjkYbFY8M033zgdGzduHHQ6XS2PIPI9BhZERCQrMYjQa1VVdixYvE0kJ6PRiNtuu83pWE5ODqKjo/20IiKmQhERkcxKxOJtjYo1FkRELQgDCyIikpVZKt5WwaB1bIwzsCAiCnwMLIiISFZi8bbeaceCqVBERIGOgQUREcmqxFo5x0IMLFi8TUQU+BhYEBGRrMxSu9kqxdtlDCyIiAIdAwsiIpKNIAhS2hN3LIiIWhYGFkREJBurzQ674PhY71S8zRoLIqJAx8CCiIhkY7bapY+DNCoEadhuloiopWBgQUREsikpc+xMaFQKaFRKpkIREbUgDCyIiEg20tTtip0Kg86RClXMVCgiooDHwIKIiGRTtdUs4Ji+DXDHgoioJWBgQUREsqnaahZAlQF5DCyIiAIdAwsiIpKNOHU7qKIbVBADCyKiFoOBBRERyUYMIII0jl8vYrvZUg7IIyIKeGp/L4CIiAKHlAqlrZ4KxeJtIjlFRkYiJyenxjEif2JgQUREsqncsXBOhTKX2WG3C1AqFX5bG1EgUSqViI6O9vcyiJwwFYqIiGQjdn+qvmMBMB2KiCjQMbAgIiLZiMGD2GZWr64MLDjLgogosDGwICIi2VTfsVAqFZy+TUTUQjCwICIi2Yg7FuLkbYCzLIiIWgoWbxMRkWyqT94GOMuCyBesViu2b9/udGzo0KHQarV+WhERAwsiIpJR9cnbAGCo6BDFVCgi+RQWFmLkyJFOx3JyctgpivyKqVBERCQbcV5FkMsdCxZvExEFMgYWREQkm9IyO4BqOxZi8TbbzRIRBTQGFkREJBtzta5QAIu3iYhaCgYWREQkm5IyV6lQjhqLYgtToYiIAhkDCyIiko00x6JKKlQw51gQEbUIDCyIiEg2pXW1m2WNBRFRQGNgQUREsil11W6WOxZERC0CAwsiIpKNFFg4FW87aizYbpaIKLAxsCAiIlnY7QLMLtrNih+zKxQRUWBjYEFERLIwl1cGDq7azTIViogosDGwICIiWVTdkdCrXU3eZmBBRBTIGFgQEZEsxB0JvUYJpVIhHWeNBRFRy8DAgoiIZCEWbouBhCiYOxZERC0CAwsiIpKFq+F4AFOhiIhaCr8HFsuXL0dycjL0ej369++Pbdu21XpuVlYW7rzzTnTt2hVKpRIPPvhgjXNWrVoFhUJR42Y2m334XRARkbhjodc4/2oRdz
BKOSCPSDatWrXCgQMHnG6tWrXy97KohVPXf4rvrF27Fg8++CCWL1+OYcOG4Z133sG1116LQ4cOoV27djXOt1gsiI6OxhNPPIHXXnut1uuGhYXh6NGjTsf0er3s6yciokqVU7edf7UYpB0L1lgQyUWtVqNHjx7+XgaRE7/uWLz66quYPn06ZsyYge7du2PJkiVITEzEW2+95fL89u3b4/XXX8fkyZMRHh5e63UVCgXi4uKcbkRE5Fuupm4DlalQ5jI77Hah0ddFRESNw2+BhdVqxd69ezF69Gin46NHj8b27du9unZRURGSkpLQtm1bXH/99UhNTa3zfIvFAqPR6HQjIiLPiDUUVWdYAJU7FgDToYiIApnfAou8vDzYbDbExsY6HY+NjUV2dnaDr9utWzesWrUK33zzDVavXg29Xo9hw4bh2LFjtT7mxRdfRHh4uHRLTExs8NcnImpuCkvKYC23e32d2nYsqs60YAE3EVHg8nvxtkKhcPpcEIQaxzwxePBg3H333bjkkkswfPhwfPbZZ+jSpQuWLl1a62MWLFiAwsJC6Xb27NkGf30ioubkYrEVQxb/gkkrdnl9LbNUY+EcWCiVCinYYJ0FEVHg8lvxdlRUFFQqVY3diZycnBq7GN5QKpUYOHBgnTsWOp0OOp1Otq9JRNRcnMovRonVhkPnvE8BFXcj9NUCC8ARbJSW2bhjQSST8vLyGo1qunbtCrXar315qIXz246FVqtF//79sXHjRqfjGzduxNChQ2X7OoIgIC0tDfHx8bJdk4goUJjMjh2EImu514XVtaVCAYBBx1kWRHK6ePEievbs6XS7ePGiv5dFLZxfw9r58+dj0qRJGDBgAIYMGYJ3330X6enpmDlzJgBHilJmZiY++ugj6TFpaWkAHAXaubm5SEtLg1arRUpKCgBg0aJFGDx4MDp37gyj0Yg33ngDaWlpePPNNxv9+yMiauqKKgILQQCKreUI1WsafK3SijSn6qlQAGDQVMyyYGBBRBSw/BpYTJw4Efn5+Xj22WeRlZWFnj17Yv369UhKSgLgGIiXnp7u9Ji+fftKH+/duxeffvopkpKScPr0aQBAQUEB7rvvPmRnZyM8PBx9+/bFr7/+iksvvbTRvi8ioubCZC6r8rGXgYU0IK9mYBHEWRZERAHP74l4s2bNwqxZs1zet2rVqhrHBKHurfrXXnutzuF5RERUqchS+UJfTItqqJJairerHmO7WSKiwOX3rlBEROQ/xirBRJGlrI4z62euq8ZCyxoLIqJAx8CCiKgFq5oKZfRyx0Iq3naxYxGkdWyQM7AgIgpcDCyIiFqwIrP8qVAudyzEORYW1lgQEQUqBhZERC1Y1WCiyNsdC2tdOxYVgQVrLIiIAhYDCyKiFsy5eNu7GgsxFcpV8XZwxRwLtpslIgpcDCyIiFqw6u1mvSEGDa7azRqkGgumQhERBSoGFkRELZjJUrUrlEzF267mWGjYFYqIKNAxsCAiasGq7lIYvU2FkuZY1ByRJM2xYGBBRBSwGFgQEbVgVVOhvCneLrPZUW53DDB1uWPBORZERAGPgQURUQtVZrPDXGaXPvemxqJqwOCqK5RUY8GuUEREAYuBBRFRC1V9h8LkxeRtceq2SqmARqWocb80eZtzLIiIAlbNRFgiImoRqhdre5MKJdVXaFRQKGoGFkyFIpJXeHg4Nm/eXOMYkT8xsCAiaqGqF2vLkQqld5EGBQDBFalQpUyFIpKFVqvFFVdc4e9lEDlhKhQRUQslBhJ6jdLp84aoq9UsUCUVinMsiIgCFgMLIqIWSkx9SogIAgBYbXZYyhu2o1DZatZ1YCGmQpnL7LBXdI8iIqLAwsCCiKiFEou148L0lccauGsh7li4mroNOAccTIciIgpMDCyIiFooccciTK9BiM5RA9HQwEJMcaptx0KvVlU5l4EFEVEgYvE2EVELZawIIkL1aoTq1SiylDe4M5S5nhoLpVKBII0KpWU2Tt8mkoHdbk
d+fr7TscjISCiVfM+Y/IeBBRFRCyW2mw2pCCyyCp0ncXuitJ6uUIBjN6O0zIaSMhZwE3krPz8fMTExTsdycnIQHR3tpxURMRWKiKjFEoOI0CqpUMaGpkKVVc6xqI1YwF1s4Y4FEVEgYmBBRNRCmaQaCzVC9RoANYfmuctcsWMRVM+OBQCmQhERBSgGFkRELZRYTxGic6RCAQ1PhSpxK7BQV5zLVCgiokDEwIKIqIUyScXbGimwaGjxdn0D8oAqOxZsN0tEFJAYWBARtVAmp+JtjdMxT4nBQm3tZqvex3azRESBiYEFEVELVVm8rUaozrtUKLFuoq4diyApFYqBBRFRIGJgQUTUQomF2mF6NUL03g3Iq2/yNlDZMaqUNRZERAGJgQURUQskCIIURIToNJWpUA2evC2mQtU+HimIqVBERAGNgQURUQtUWmaDzS4AcKRChXiZCiVN3tbW/muFNRZERIHN48Di/PnzmDRpEhISEqBWq6FSqZxuRETU9Indn5QKxwv+MLErVAOLt6V2s5radywqAwumQhERBaLafwPUYurUqUhPT8dTTz2F+Ph4KBQKX6yLiIh8yFhlhoVCofA6FarUozkW3LEgIgpEHgcWv/32G7Zt24Y+ffr4YDlERNQYxJ0JMaDwtnjb7MkcCwYWREQByeNUqMTERAiC4Iu1EBFRI6naarbqn0WWctjtnv+MryzerqvdLGssiIgCmceBxZIlS/DYY4/h9OnTPlgOERE1hsqp286BBQAUeVgDIQiCe+1mxVQoTt4mIgpIHqdCTZw4ESUlJejYsSMMBgM0Go3T/RcuXJBtcURE5BtFVWosAECnVkGrUsJqs8NkLkeYXlPXw52Yy+zSx+5M3uYcCyLvhYWF4bPPPqtxjMifPA4slixZ4oNlEBFRYzJKqVCVAUSoXo38YqsUdLirtMoORF07FkyFIpKPTqfDrbfe6u9lEDnxOLCYMmWKL9ZBRESNSCzeDqmSAiUGFp7OshADC51aCZWy9k6BnGNBRBTYPA4sAMBms+Grr77C4cOHoVAokJKSgnHjxnGOBRFRM1G9xgKo0hnKw1kWYmpTXa1mAcCgEdvNMhWKiCgQeRxYHD9+HGPHjkVmZia6du0KQRDw999/IzExEd9//z06duzoi3USEZGMxHSnqrUUobqGzbIotTpqLOpqNQsABp3jfnOZHXa7AGUduxtERNT8eNwVau7cuejYsSPOnj2LP//8E6mpqUhPT0dycjLmzp3rizUSEZHMTBZHupNYvA1UnWXhWSpUibs7FlXuL2VnKCKigOPxjsXWrVuxc+dOtG7dWjoWGRmJxYsXY9iwYbIujoiIfMNVKpQ0y6KBxdv17Vjo1ZX3l1htCNY1KBuXiIiaKI93LHQ6HUwmU43jRUVF0Gq1siyKiIh8y1St3SxQmRblaSqUOHW7rlazAKBUKqTgg9O3ibyTm5sLhULhdMvNzfX3sqiF8ziwuP7663Hfffdh165dEAQBgiBg586dmDlzJsaNG+eLNRIRkcxMLtrNikGG56lQ9Q/HE0mdocpYwE1EFGg8DizeeOMNdOzYEUOGDIFer4der8ewYcPQqVMnvP76675YIxERyUxsN+sqFcrjrlBupkIBnGVBRBTIPE5wjYiIwNdff41jx47hyJEjEAQBKSkp6NSpky/WR0REPuC6xqKhXaHcS4Wqek6JhYEFEVGgaXDlXOfOndG5c2c510JERI2g3GaXdgycUqEa2BVKDCzq6wrlOIezLIiIApVbgcX8+fPx3HPPITg4GPPnz6/z3FdffVWWhRERkW8UV9ktqFq8LXWF8jAVqkRKhar/V0pwRfDBdrNERIHHrcAiNTUVZWVl0sdERNR8GSt2JHRqJbTqylK7MGnHomGpUEHa+sv2DKyxICIKWG4FFps3b3b5MRERNT+uCrcBIKRi8rancyzMHhVvi6lQDCyIiAKNx12hpk2b5nKORXFxMaZNmybLooiIyHcqC7c1TsdDG7
hjUSLtWNT/XpVBmmPBGgsiokDjcWDx4YcforS0tMbx0tJSfPTRR7IsioiIfKfIIs6wcA4ExM+tNru0C+EOtpslIiLAg65QRqNRGohnMpmg1+ul+2w2G9avX4+YmBifLJKIiOTjauo2AARr1VAoAEFwpEu5M/AOaGC7WQYWREQBx+3AIiIiQhoZ36VLlxr3KxQKLFq0SNbFERGR/IwuZlgAgFKpQIhWDZOlHCZzOaJCdG5dT9yx8GjyNlOhiIgCjtuBxebNmyEIAq688kqsW7cOrVu3lu7TarVISkpCQkKCTxZJRETyKZJ2LDQ17gvRi4GF+7MsPNmxYPE2EVHgcjuwGDFiBADg1KlTSExMhFLpcXkGERE1AWLQUH3HQjyWVehZZyipxsKDVKhSBhZERAHH48nbSUlJKCgowB9//IGcnBzY7Xan+ydPnizb4oiISH61tZt1HHPsYhg9CSys7hdvs8aCiChweRxYfPvtt7jrrrtQXFyM0NBQKBQK6T6FQsHAgoioiTPVUmMBVBZ0e5IKJdZLuLdjUZEKxcnbRF4JCQnBsmXLahwj8ieP85keeughaZZFQUEBLl68KN0uXLjg8QKWL1+O5ORk6PV69O/fH9u2bav13KysLNx5553o2rUrlEolHnzwQZfnrVu3DikpKdDpdEhJScGXX37p8bqIiAJVZSpUzRoLMdgQdzXcYS5z7Fx7smPBORZE3gkKCsLs2bOdbkFBQf5eFrVwHgcWmZmZmDt3LgwGg9dffO3atXjwwQfxxBNPIDU1FcOHD8e1116L9PR0l+dbLBZER0fjiSeewCWXXOLynB07dmDixImYNGkS9u3bh0mTJuG2227Drl27vF4vEVEgqK3dLFAZbLg7JK/cZofV5ggs3CveZioUEVGg8jiwGDNmDPbs2SPLF3/11Vcxffp0zJgxA927d8eSJUuQmJiIt956y+X57du3x+uvv47JkycjPDzc5TlLlizB1VdfjQULFqBbt25YsGABRo0ahSVLlsiyZiKi5q6uVKjK6dvupUKVVklp8qTdLIu3iYgCj8c1Ftdddx0efvhhHDp0CL169YJG47yVPm7cOLeuY7VasXfvXjz22GNOx0ePHo3t27d7uizJjh07MG/ePKdjY8aMYWBBRFShzuJtnWepUGJgoVAAOnX971UZNI7rFzMViogo4HgcWNx7770AgGeffbbGfQqFAjabe+9C5eXlwWazITY21ul4bGwssrOzPV2WJDs72+NrWiwWWCwW6XOj0djgr09E1NS5U2PhblcoaYaFRuXUzKM2YiqUucwOu12AUln/Y4iIqHnwOBXKbrfXenM3qKiq+i8iQRDc+uUk5zVffPFFhIeHS7fExESvvj4RUVMlCEKdOxYhFcGGu3MsPJlhATjXYZSyMxQRyeTYeRPSzhb4exktnldT7sxmc4MfGxUVBZVKVWMnIScnp8aOgyfi4uI8vuaCBQtQWFgo3c6ePdvgr09E1JRZyu0oswkAaive9qzGQizCdjewqNo5igXcRA2Xl5eH6Ohop1teXp6/l+UXdruA29/didve2YHCEvdbZZP8PA4sbDYbnnvuObRp0wYhISE4efIkAOCpp57CihUr3L6OVqtF//79sXHjRqfjGzduxNChQz1dlmTIkCE1rrlhw4Y6r6nT6RAWFuZ0IyIKRMaKgEGhAIK1tddYuNsVyuzBcDwAUCoV0rks4CZqOEEQkJeX53QTBMHfy/KLvCIL8outsJbbcfZiib+X06J5HFi88MILWLVqFV5++WVotVrpeK9evfD+++97dK358+fj/fffxwcffIDDhw9j3rx5SE9Px8yZMwE4dhKqD9xLS0tDWloaioqKkJubi7S0NBw6dEi6/4EHHsCGDRvw0ksv4ciRI3jppZfw888/1zrzgoioJRFTnEK0apf1DWLdhafF20EugpTaSNO3y1jATUTeyygolT4+b2x4Ng15z+Pi7Y8++gjvvvsuRo0aJQUAANC7d2
8cOXLEo2tNnDgR+fn5ePbZZ5GVlYWePXti/fr1SEpKAuAYiFd9pkXfvn2lj/fu3YtPP/0USUlJOH36NABg6NChWLNmDZ588kk89dRT6NixI9auXYtBgwZ5+q0SEQWculrNVj3u7o6FlAqlcf99qiCtCihmKhQRySPzYtXAwlLHmeRrHgcWmZmZ6NSpU43jdrsdZWWe57XNmjULs2bNcnnfqlWrahxzZ5tvwoQJmDBhgsdrISIKdOJOREgtgUVIlcnbNrsAVT1dm6QdCzdToQDOsiAieWVUCSyyuWPhVx6nQvXo0QPbtm2rcfzzzz932k0gIqKmp65Ws47jlQGHO7MmpHazHqRCiWlT3LEgIjlkFlTWVeQwsPArj3csnnnmGUyaNAmZmZmw2+344osvcPToUXz00Uf47rvvfLFGIiKSibGeVCidWgWtWglruR0mcznCaglAROKOhTtTt0WGinNLOCSPiGTgnArFwMKfPN6xuOGGG7B27VqsX78eCoUCTz/9NA4fPoxvv/0WV199tS/WSEREMpGKt120mhVVdoaqP721csfC81Qo7lgQkRwyWGPRZHi8YwEAY8aMwZgxY+ReCxER+Vhl8XbtOxGhejXyi61uDcnzdEAeABh0TIUiInkIgoBMdoVqMjzesejQoQPy8/NrHC8oKECHDh1kWRQREflGkUWssahjx6Ii6HCnM5S4Y9GQVKhSpkIRkZculpQ5vUkhzrMg//A4sDh9+jRstprvMlksFmRmZsqyKCIi8g1px6KOVCgxTcroRipUSQNSoYKYCkVEMhHrK6JCdNCoHF3scouYDuUvbqdCffPNN9LHP/30E8LDw6XPbTYbfvnlF7Rv317WxRERkbxMlrqLt6ve586QPLMX7WYZWBCRt8SOUG1bBUGnViKzoBTnjWa0iQjy88paJrcDixtvvFH6eMqUKU73aTQatG/fHv/3f/8n28KIiEh+4o5FSB01FiEeDMkTOzt5VGPBORZEJBOxcLttqyAoFXAEFoWss/AXtwMLu92Rr5acnIzdu3cjKirKZ4siIiLfqJxjUfuPf7HFrEfF2x7sWEhzLMoYWBCRd8TAok2rINjsjiHKLOD2H49rLBYtWoTQ0NAax61WKz766CNZFkVERL5R5EaNRajeg3azZY43nRqUCuVGqhURUV3EjlBtWxkQG6YHAJw3scbCXzxuN3vPPffgmmuuQUxMjNNxk8mEe+65B5MnT5ZtcUREJC932s2G6NxPhRI7O3GOBVHjMhgMeOaZZ2oca2mkVKiIIOmNE+5Y+I/HgYUgCFAoFDWOZ2RkOBV0ExFR0yMWZIe4027WjR0FafK2J12hxMnbTIUiarDg4GAsXLjQ38vwu8yLjuLtNq2CcLHECoCBhT+5HVj07dsXCoUCCoUCo0aNglpd+VCbzYZTp07hmmuu8ckiiYjIeza7IAUW7nSF8tXk7eCKHRHOsSAibxjNZTBW7FK0iQhCbkUKFKdv+4/HXaHS0tIwZswYhISESPdptVq0b98enTp1kn2BREQkj+IqL+TrCiw86QolBhaeFW8zFYqIvCfOsGhl0CBYp66sseCOhd+4HViIeXzt27fHxIkTodc7nrzCwkJ88skn+M9//oN9+/a5HJ5HRET+JwYKWpUSOnXtgUCYm3MsBEGo7ArFdrNE1Mgyq3SEAoDYMB0Ax8+6Ems5DFqPM/7JSx53hZoyZQr0ej02bdqEu+++G/Hx8Vi6dCnGjh2LPXv2+GKNREQkA3dazTrur6ixqGfHwlJuR0V3R8+6Qmkq2s0ysCAiL2RU1Fe0jXAUrYfo1NIbF0yH8g+PQrmMjAysWrUKH3zwAYqLi3HbbbehrKwM69atQ0pKiq/WSEREMigy11+4DVTtClVWa8MOoHLqNtCwVKjSMhvsdgFKpevrExHVRWw1K+5YKBQKxIbpcSqvGOeNZiRHBftzeS2S2zsWY8eORUpKCg4ePIilS5fi3LlzWLp0qS/XRkREMqpsNV
vfjoXj/jKbAEu5vdbzxB0HrUoJtcr9DfCqhd6l7AxF1CAXLlxAjx49nG4XLlzw97IaVdWp2yIxHYp1Fv7h9o7Fhg0bMHfuXPzjH/9A586dfbkmIiLyAbF9bKiu9hkWABCsVUOhAATBEYzoa9mNkFrNajzLqq26u1FitUldoojIfTabDYcOHapxrCWRdiwiqgYWjhrgHKZC+YXbvw22bdsGk8mEAQMGYNCgQVi2bBlyc3N9uTYiIpKRWGNRXyqUUqlAiLb+lrOVrWY9CwyUSoUUjLCAm4gaqnrxNlAZWGRzx8Iv3A4shgwZgvfeew9ZWVm4//77sWbNGrRp0wZ2ux0bN26EyWTy5TqJiMhL7qZCVT2nrs5QDekIJQquCEZKyjjLgog8V2ItR36xYyBe21aVE8djQpkK5U8ed4UyGAyYNm0afvvtN+zfvx8PPfQQFi9ejJiYGIwbN84XayQiIhmIxduhbqQeuTPLQtxtqC1Vqi6cZUFE3jhXkQYVqlMjPKgyvTMunKlQ/uRxYFFV165d8fLLLyMjIwOrV6+Wa01EROQDle1m666xqHpOXYFFSQOmbos4y4KIvJHhIg0KqEyFOm/ijoU/eBVYiFQqFW688UZ88803clyOiIh8QCzerq/GAqhMhaqrxkJsN+tJq1lRkJazLIio4Vx1hAKA2NCKGotCMwRBaPR1tXSyBBZERNT0eVJjUTnLov4di4bUWBg0YioUayyIyHOuOkIBQExFu1lLuR3GUv58aWwMLIiIWgipxsKDVCi3ircbsGNhYI0FEXmhcsfC4HRcr1EhwuD4+cV0qMbHwIKIqIUwWSpqLNwo3g7zIBWqITUWLN4mIm9kXiwBULPGAqhMh2JnqMbHwIKIqIWQPxXKcV9DukJVFm8zVYGIPFdbKhRQmQ6VXcjAorExsCAiaiHEVCiPirfrSoWy2gE0sMaCxdtE1ECWchvOV7STrV68DQBx4vRtE1vONjYGFkRELYSpATUWdc6xqBhuZ2CNBRE1oqwCx06EXqNE62BtjfullrNMhWp0DCyIiFoAS7kNVptjh8GtVCg3aixKvekKxTkWRNRAVdOgFApFjftjwzh9218YWBARtQBVdx6Cte6nQhXVuWPR8MBCmmNRxsCCiDyTUVG4Xb0jlCimYscim9O3G139v12IiKjZEwOLEJ0aKmXNd/iqC9W5P3nbm3azLN4mahi9Xo9Zs2bVONYSZNYydVsk1Vhwx6LRMbAgImoBiqoEFu7w9eRtMbAotnDHgqghQkND8eabb/p7GX5R29RtUWyV4m27XYDSjTdTSB5MhSIiagHEAMGd+oqq5xVbbbDZBZfneDN5WwxGmApFRJ7KqKPVLABEhWihUAA2u4C8YqZDNSYGFkRELYDYNtadVrPVz6tt+rZ3k7cd12cqFBF5KrOeHQu1SomoEEcBdw7rLBoVAwsiohbAk1azAKBTq6BVO35F1BZYmK3i5G3Ps2o5eZuIGqLcZkd2Re1EbcXbQGWdBTtDNS4GFkRELUCRh6lQABBWT51FidQVyvNfJcE6tpslIs9lG82w2QVoVUpEV+xKuFLZcpY7Fo2JgQURUQsg7Vi4WbwNVBZ619YZSgwK9A1JhdJw8jYReU5Mg4qP0NdZlF3ZcpY7Fo2JXaGIiFoAscbCkx0LMW3K1SwLm12ApdwxcM+bVKjSMhu7thA1QEFBAW688UanY1999RUiIiL8sp7GUl9HKFFsKFvO+gMDCyKiFqByjoV7NRZAZRBidJEKZa7SzcmbdrMAYC63NSg4IWrJysrKsHXr1hrHAl1mPR2hRHHhnL7tD0yFIiJqATxtNwvUnQpVNYVJr/H8V0nVYISzLIjIXeLU7TYRtRduA5WpUKyxaFwMLIiIWoAib1KhXHSFqjocT6HwPI1JqVRIAQkLuInIXeKOhbupUNyxaFwMLIiIWoDKdrOeBBa1d4USZ1gYGjAcTySmP5WUcZYFEblHLN5uU19gUdEVKr/YCmtFPR
j5HgMLIqIWoDIVyvMai7pSoRrSEUokTd/mjgURucFuF3CuQJxhUXdg0TpYC43KsZuaW8R0qMbCwIKIqAUokoq3Pd+xcNUVSkxfCvJix4KzLIjIE7lFFlhtdqiUCmkAXm0UCgVimA7V6BhYEBG1AA1JhRI7SBldBRYV6UvepEIFaTnLgojcJ7aajQvTQ62q/yWsNCSvkIFFY2FgQUQU4Ox2AUXWih2LBtRYFFlc1FhYHTnL3qRCGaRUKNZYEFH9pI5Q9aRBiWLDuGPR2BhYEBEFuGJrOQTB8XGYTDUW8hRvMxWKiNwndYSqZ4aFSAosTKyxaCwMLIiIApzYLlajUkCndv/Hfp2BRcUuQ0OG44nE+gymQhGROzLc7Agl4o5F42NgQUQU4ExVCrc9mTlR1xyL0ipzLBrKoGUqFBG5T2w1W19HKJFUY8HAotEwsCAiCnANaTXrOL9yjoUg5lJVKJGhK5SBxdtE5AExFaq+qduiWE7fbnQMLIiIApypAa1mq55fZhNgqTZgSo4dC6ZCEZG7BEGQirfd37FgKlRjY2BBRBTgGtJqFgCCtWqImVPV6yzEgmtvireDWbxNRG66UGyFuczxBkd8RN0zLERiKpTJXM6Uy0bi2W8ZIiJqdsQaCU9ToZRKBUJ0apjM5TCZyxAdqpPuE4MBvRxzLMoYWBB5SqvVYsKECTWOBSoxDSomVAed2r2fOyE6NQxaFUqsNpw3WpAcxZe9vsa/YSKiAFdZY+H5j/xQKbCotmMhtpuVoXi7lO8kEnksPDwcn3/+ub+X0WgyPCzcBhzTt+PC9DiZV4zzRjOSo4J9tTyqwFQoIqIAV9TAVCjHY1x3hiqVpXibNRZE5J5MqdWse4Xbohh2hmpUfg8sli9fjuTkZOj1evTv3x/btm2r8/ytW7eif//+0Ov16NChA95++22n+1etWgWFQlHjZjbzHxQRtUzGBhZvA5WTusVdD5G4Y+HN5O0gDQMLInKPNHXbzeF4IrGAO4edoRqFXwOLtWvX4sEHH8QTTzyB1NRUDB8+HNdeey3S09Ndnn/q1CmMHTsWw4cPR2pqKh5//HHMnTsX69atczovLCwMWVlZTje93r1CHyKiQFNZvO1ZjYXjMY7AwlgtFapEKt5ueEZtZbtZpkIRUd2kqdsepEIBlYFFNncsGoVfayxeffVVTJ8+HTNmzAAALFmyBD/99BPeeustvPjiizXOf/vtt9GuXTssWbIEANC9e3fs2bMHr7zyCm655RbpPIVCgbi4uEb5HoiImroii2O3IcSbVKhqgYWZ7WaJqBF5OnVbxJazjctvOxZWqxV79+7F6NGjnY6PHj0a27dvd/mYHTt21Dh/zJgx2LNnD8rKKrfpi4qKkJSUhLZt2+L6669Hamqq/N8AEVEzIe5YhDUgsBDTp2or3pajxoLtZomoPmKNRaLHgYWjxoKpUI3DbzsWeXl5sNlsiI2NdToeGxuL7Oxsl4/Jzs52eX55eTny8vIQHx+Pbt26YdWqVejVqxeMRiNef/11DBs2DPv27UPnzp1dXtdiscBiqfwHZzQavfzuiIiajsp2s57/yBeDEXHXQyRN3vZixyKYk7eJGqywsFDK+BC9//77CA8P99OKfKewtAymip9jCQ2ssThv4o5FY/B7u1mFOH2pgiAINY7Vd37V44MHD8bgwYOl+4cNG4Z+/fph6dKleOONN1xe88UXX8SiRYsatH4ioqaucvJ2w2ssqu9YmGXoCiU+trTMBrtdgFJZ+89+InJmtVrxv//9z+nY8uXL/bQa3xJ3K1oHaz2u64oNraixKDTX+xqTvOe3VKioqCioVKoauxM5OTk1diVEcXFxLs9Xq9WIjIx0+RilUomBAwfi2LFjta5lwYIFKCwslG5nz5718LshImq6vJlj4SoVShAEaaidN5O3qz7WXM5dCyJyTewI5WnhNlDZbtZSboexlI0ifM1vgYVWq0X//v2xceNGp+MbN27E0KFDXT5myJAhNc
7fsGEDBgwYAI3G9TtxgiAgLS0N8fHxta5Fp9MhLCzM6UZEFChMXrSbFYu3TVXmWJTZBNjsjt1iOdrNAkyHIqLaiR2hPG01Czh+RkUYHD/HmA7le35tNzt//ny8//77+OCDD3D48GHMmzcP6enpmDlzJgDHTsLkyZOl82fOnIkzZ85g/vz5OHz4MD744AOsWLEC//rXv6RzFi1ahJ9++gknT55EWloapk+fjrS0NOmaREQtibXcDku5HQAQ5kW72apzLMTCbcC7HQulUgG9xvFriAXcRFQbqSNUAwILoDIdip2hfM+vNRYTJ05Efn4+nn32WWRlZaFnz55Yv349kpKSAABZWVlOMy2Sk5Oxfv16zJs3D2+++SYSEhLwxhtvOLWaLSgowH333Yfs7GyEh4ejb9+++PXXX3HppZc2+vdHRORvVSdmN6TdbIiLGgsxCFArFdCovHt/yqBVw1xmRTFnWRBRLcQai4akQgGOdKij503ILmRg4Wt+L96eNWsWZs2a5fK+VatW1Tg2YsQI/Pnnn7Ve77XXXsNrr70m1/KIiJo1cafBoFVB1YDi6DAXcyxKZZhhIeL0bSKqj5QK1crQoMfHidO3TWw562t+TYUiIiLfqpy63bD3kSqLtytTocRJ2d50hBJxlgUR1ceb4m2AQ/IaEwMLIqIA5k3hNlAZkBRbbVLBtlmG4XgiA6dvE1Edii3luFjieGPD06nbInFIHgML32NgQUQUwCpbzXpeuA0412WI9RpyDMcTGaQheayxIKKaxDSoUL26QQ0oACCmYscim9O3fY6BBRFRAPNm6jYA6NQqaNWOXxVikFIqw3A8EVOhiKgulYXbDauvAKrUWHDHwucYWBARBTBvaywAIKxaZ6hSGYbjiYKYCkVEdcjwYoaFKLZK8ba9IqWTfIOBBRFRAJN2LHQNSyEAKtOoxGuVypoKVbFjUcbAgohq8rZwGwCiQrRQKACbXUBeMdOhfImBBRFRADNWpC81ZIaFqHpnKDEI8GbqtkissSi2sMaCiGrydoYFAKhVSkSFOAq4c1hn4VMMLIiIApgcqVCh1VKhxLQlpkIRka9lypAKBVTWWbAzlG/5fUAeERH5TpGX7WaBmoGFWcYBeQYNi7eJGkKj0WDEiBE1jgWaDBmKtwFHy9n9mcB57lj4FAMLIqIAJqYvNbRNIwCEVNRnSMXbUlco73+FSDsWrLEg8khERAS2bNni72X4lLnMhtyKadkNnWEhqmw5yx0LX2IqFBFRAPO23WzVxxZZHEFKiYw7FsEVOymlnGNBRNWcq0iDCtKo0Mrg3W4MW842DgYWREQBTJq8LWO7WbO0Y+H9rxBO3iai2oj1FW1bBUGhUHh1LU7fbhwMLIiIAlhl8bYXqVC1FG/LkgqlYWBBRK6JHaG8TYMCKlOhWGPhWwwsiIgCmFhj4V3xdrUaCzmLt7ViKhQDCyJyJhZue9sRCgBiQ9kVqjEwsCAiClCCIEg1FmE+mGMhZ7vZYtZYEFE1lalQ3nWEAoC4cEdgkV9shbXc7vX1yDV2hSIiClAlVhvsguNjb2osqreb9cnkbe5YEHnEZDLhscceczq2ePFihIaG+mlF8pMzFaqVQQONSoEym4DcIossuyBUEwMLIqIAJQYCKqXCqyBATIUSdz+kVCgZdixYvN0wZy+U4HhuEUZ2jfH3UshPzGYzli9f7nRs4cKFARNYCIKAMxeKAciTCqVQKBATqkdmQSnOG80MLHyEqVBERAFKbA8bqld71VGlcseiIhVKxh0LMTgpLbPBLm6vUJ0EQcD0D3fjnpW78duxPH8vh8gnTuYV47zRAo1Kga5x8gRLUmeoQtZZ+AoDCyKiAGWUYeo24JwKJQiCrDsWwVU6S5nLuWvhjoPnjPj7fBEA4Nt95/y8GiLf+OXweQDA4A6RXv8ME4l1Fizg9h0GFkREAUqOVrNVH19uF2Apt6OkotBalh2LKtdgOpR7vvsrS/p4w6FslNtYiEqB5+fDOQ
CAUd3kS/eLETtDmdhy1lcYWBARBagiMbDw8t0+g0YFMZPKWFoGc5njhawcOxZKpQJ6jeNXEQu46ycIAr77q3KX4mJJGXaduuDHFRHJr6DEir1nLgIARnWPle26sWHcsfA1BhZERAFKrIkI9aIjFOB48S+mIuQWVb7TJ0e7Wcd1HNfmjkX99mUUIuNiKQxaFcb3SQAArN+fVc+jiJqXLUdzYbML6BobisTW3reaFXH6tu8xsCAiClBiFydvAwsACKtIh8qpkkKgV8sTWIjpUJxlUb/vKmoqRnWPxU192wAAfjp4HjYWvlMA+bmivmJUd3m7nsVx+rbPMbAgIgpQUvG2DIGFuGORU/FOn06thFLZ8E5TVXGWhXvsdkGqr7i+dzyGdoxCmF6NvCIL9pxmOhQFhjKbHVv/zgUgf2ARw1Qon2NgQUQUoIpkKt52XEMMLBzv9MmVBlX1WkyFqtve9IvINpoRqlNjRJdoaNVKXJXiyD//4UC2n1dHJI/dpy/AZC5H62At+iS2kvXaYiqUyVwuNaEgeTGwICIKUGKNhRytGsVdDzEVSo6OUKIgKbDgL/q6iGlQV/eIhb7i739sz3gAwE8HszkHhALCLxXdoEZ2jYFKpl1RUYhOLb2RwXQo32BgQUQUoMR2s2EypEKFSjUWjhQCOTpCicTibaZC1c5mF7C+Ylfiht4J0vHLOkchWKtCVqEZaRkFflodkTwEQZDmV1wlcxoU4Ji+Hcd0KJ9iYEFEFKDE4m05aixCq+9YMBWqUe06lY9ckwXhQRoM6xQlHddrVFI7zh+ZDkXN3IncYpzOL4FWpcTwLtE++Rox7AzlUwwsiIgClNRuVidDjYXOucZCzlQoqXi7jIFFbcSi7Wt6xEGrdv7VfW3POACOtrOCwHQoar7E3YpBHVrLNm27OnGWRQ5ToXyCgQURUYAyydhuVrxGrrRjId8v/co5FqyxcKXMZscPFbMqrr8kvsb9I7pGQ69RIuNiKQ6eMzb28ohk88sR+adtVycGFtncsfAJ34SDRETkdyYZ282KNRZWW8XUbY1870sFMRWqTttP5ONiSRkig7UY0iGyxv0GrRoju8bghwPZWL8/Cz3bhPthldTYVCoVUlJSahxrrnw1bbs6Tt/2LQYWREQBqkgq3vY+Fap6WoJBzh2LirSqEgsDC1fEblDX9IyDWuU6oLumZxx+OJCNHw5k4+ExXaFQyNtNh5qe1q1b4+DBg/5ehmx8NW27OrHlLFOhfIOpUEREAajMZpdqFuTIVa6eTqX3RbtZ1ljUYC2346eDFd2gLkmo9bwru8VAq1biVF4xjp43NdbyiGTjq2nb1Uk7FibuWPgCAwsiogAk7lYA8qZCieQdkOdYX2r6Rew6mS/bdQPBtmO5MJrLEROqw8D2rWs9L1SvweWdHd2iftjP7lDUvDhP2/ZdGhQAxIZW1FgUmtnswAcYWBARBSCx1WyQRgVNLekznqi+YyFnV6h+SREwaFXIuFiKie/uxD0r/8DhLBYhA5XdoMb2iq93WNi1FcPyfjiQ5fN1EclJnLYdGaxFn8QIn36t2HAd9BolLOV27D590adfqyViYEFEsvnl8Hnc/u4O/G9vBt8J8jOjOHVbht0KwEVgIeOORbe4MGz+1xW4a1A7qJQKbD6ai7FvbMO8tWk4e6FEtq/T3JjLbNggpUHV7AZV3VXdY6FWKvD3+SIczyny9fKIZCNO277CB9O2q9OpVbi5X1sAwPvbTvr0a7VEDCyISBYf/HYKMz7ag50nL+Bfn+/DvR/tQQ67bviNmAolR6tZx3WcU6Hk3LEAHHnPL9zUCz/PH4HresdDEIAvUzNx5f9twcJvDiKvqOUVWm45moNiqw0J4Xr0TWxV7/nhhsrheT9y14KaCV9P23Zl2rBkAMDGw+dxOq+4Ub5mS8HAgoi8YrMLWPjNQTz73SEIAnBZpyhoVUr8fDgHo5f8iq/TMrl74Qdiq9lQmYZMVS8Al3PHoqrkqGC8eW
c/fDvnMgzvHIUym4BV20/j8pc349WNf0tD/1qCbyvSoK7rHQ+lm+/iisPyfmjgFO59Zwvw86Hz/D/bDBQXF2PhwoVOt+Li5vciuTGmbVfXKSYEV3aLgSAAK38/1Shfs6Vgu1kiarBiSznmrk6VhhotuLYb7ru8A/4+X4SHPk/DgUwjHliThh8PZOP5G3siMkTn5xW3HCZLxdRtGVrNAoBWrYRO7chLBuQt3nalV9twfDx9EH4/noeXfzyCfRmFeOOXY/jvzjOYM7ITru8dj+hQnddtVc1lNhzOMmJ/ZiEyC0oxOiUW/ZNqL5JuLCXWcmyqSA+pqxtUdaN7xOGJrw7g4Dkj0vNL0C7S/bad20/kYcoHf6DMJmBk12i8dEtvxFR00KGmp6SkBIsWLXI6Nnv2bAQHB/tpRQ3TGNO2XZl+WTI2HcnBZ3syMP/qrgg3yPOzsqVjYEFEDXLeaMa0Vbtx8JwROrUSr03sg7G9HHngXeNC8eWsYVi++QSWbjqGHw5k449TF/DCTT1xTc/6c8XJe2IqlJy/qEP1aliKrADkbTdbl2GdovDV7GH44UA2XvnpKE7mFePZ7w7h2e8OITxIg66xoegSF4KusaHoHBuKrrGhaBWsdXkta7kdf5834a+MQvyVUYC/Mgrx93kTyu2V786/s/UkBndojdkjO+GyTlF+mwfxy+EclJbZ0K61Ab08GHjXOliLQcmtsf1EPn44kIX7R3R063HHzptw/8d7UWZz/F1sPpqLMUt+xYs39+L/WfIpsb7iKh93g6puaMdIdIsLxZFsEz79Ix3/uMK9/ytUNwYW1GR9tvssDp4rxD+u6IS4cL5r1pQczjJi2qrdyCo0IzJYi/emDEC/ds454BqVEg9c1RmjusfgX5/vw5FsE2b+90+M75OAReN6IMLg+sUfycMoc42F41oa5FUEFnLXWNRFoVBgbK94jE6Jxed7M7Dy91M4nlOEwtIy/HH6Av44fcHp/OhQXUWgEYLEVgacyivGXxkFOJxlkiaHVxUZrEXvtuEI0Wvw44Es7Dx5ATtP/oFL2oZj9shOuKp7rNupSHL57i/HULzre8d7HNxc2zMO20/kY/2BbLcCixyTGVNX7obJXI5+7SKwaFxPPLruLxzKMmLmf//ELf3a4plxKbIMWiSqqqDEij1nHP9/fT2/ojqFQoEZwzvgX5/vw4fbT2PG8GRZOui1dAwsyCuHs4xY/Uc6zl4owbPje8o2LfPrtEw8su4vAMC6PzPxr9FdMGlIe593i6D6bTmag9mf/Iliqw0do4OxcuqldaZb9GwTjq/nDMMbvxzDW1tO4Ou0c9hxIh+Lb+mFK7s17jtULYnYblauVCjHtSp/Zfg6FcoVtUqJOy5thzsubQdzmQ0nc4vx93kTjp434e9sx58ZF0uRa7Ig12TBb8fzalwjPEiD3m3D0bttOHq1iUDvtuGID9dLL97PFXTDu7+exJrd6diXUYj7Pt6LrrGhmDWyI67rFV/r5Gs5mcxl2HzU0dP/+t7up0GJxvSIw9PfHMS+swXILChFm4igWs8ttpRj+qo9yCwoRftIA96fMhCtg7X4avYwLPn5b7y99QTW/ZmBnSfz8X+3XYLBHSIb/H0RVbflaC7sAtAtLhRtW/lu2nZtbrgkHi/9eATZRjPW78/C+D5tGn0NgYaBBXms1GrDd3+dw6d/pCM1vUA6PmXlH/jfzKFoXUsagrv+TL+Ih//nCCriwvTINpqx8NtD+CI1E/++qRd6epAWQPL6ZNcZPP31QdjsAoZ0iMTbd/d3Ky9Vp1bh4THdcHVKHB76LA0ncosxbdUe3NKvLW7p3wY9EsIRHsR3Q+VkkrndLOCcVtVYqVC10WtUSEkIQ0pCmNPxYks5juUUSYHG2QslSI4KRq+24ejdJgKJrYPq3AFIiAjCwnE9MOfKTvjgt1P4eMcZHD1vwgNr0vDqxr8xc0RH3NyvDXRq333/Gw+dh7Xcjg7RwegeH+rx42PC9B
iQ1Aq7T1/EjweyMf2yZJfn2ewC5q5Oxf7MQrQyaLDqnkuln99atRKPXNMNV3aLwfzP9iH9QgnueG8n7hveAfNHd/Hp908thzht+8pujbtbIdKpVZgyJAmvbPgb7207iXGXJPgt/TFQMLAgt/193oRPd6Xjiz8zpDQLtVKBq1Nise9sAU7mFmPaqt349N5B0iRdT2UWlOK+j/bCWm7H1SmxeOuufliz+yxe+vEI/sooxLhlv+GeYcmYf3UXBDdikVcgspbboVTArXdg7XYBi388gnd/dfT8vqVfW7x4cy9o1Z69e9snMQLfzx2O/9twFO//dgrr/szAuj8zAADtWhvQs00YeiSEo2ebcPRMCGOxtxfEGoswWVOh/Ltj4Y5gnRp9EiO8HrIVFaLDI9d0w/0jOuLjHaex4rdTOJNfggVf7MfrPx/DfZd3wOQhST7ZwRCH4l3fu+Evcq7tGV8RWGS5DCwEQcCibw/ilyM50KmVeH/KQLSPqln0O6B9a6x/YDie/+4Q1uw+i3d+PYmtf+fitYl90D0+rMb5RO5qzGnbdblzUBKWbT6OA5lG/HHqAgZxV84rfGVGdTKX2bB+fxY+3ZWOPWcqJ1S2bRWEOy5th1sHtEVMqB7Hc0y45a0dSDtbgDmfpuLdSf09/oVbbCnHjA/3IK/Igu7xYVgysQ/UKiXuHpyE0T1i8dx3h/HtvnNY8dsprN+fhUXjemB0jzi5v+WAd/ZCCZZuOoYv/sxEuV1AsFaFUL0GoXo1woIcf4bqNQir+DNUr0ZqeoH0ztL8q7vgn1d2avALHr1GhSeuS8GYHnFY8dsp7M8sRMbFUqRfKEH6hRKs31/ZJjMhXI8ebcLRMyEcPRLC0KNNGOLC9HxHyQ0mnxRvV+4q+ardbFMTHqTBnCs7Y9plyVj9x1m8++sJZBvNePa7QzicZcTLE3rL+u+xsKQM2445Xmzd0LvhRdPX9IzDs98dwp4zF5FjNNfo7vT+tlP4aMcZKBTAkol90D+p9jkZITo1Ft/SG6O6x+KxdX/hSLYJ45f9jodGd8GM4R1kTVEVBAEmSzm0KqXfd8XIt3afarxp23VpHazFzf3a4tNd6Xj/t1MMLLzEwIJqKLPZ8VdGAb7/Kxvr/sxAYakjpUKlVOCq7jG4c1AShneKcipm7BQTig+mDsCd7+3CpiM5ePzL/XjpFvd/4drtAh5Yk4bDWUZEhejw/pQBTjsSMaF6LL2jL27p1wZPfX0AZy+U4r6P92J0SiwWjuuBhDpyiMnh7IUSLNt0HOv+zHDqglNstaHYakO2se7Ha1VKvDyhN27sK08O6oD2rTGgvaOtZ0GJFQfPGXEgsxAHzhlxMLMQJ/OKca7QjHOFZmw8dF56XCuDBikJjp2NlHhHKkyHqOBGyX1vTqQ5FjLWWFQNUgyalvXrw6BVY/plybh7cDus3X0WC785iM/3ZqBLbCjuvbyDbF/np4PZKLMJ6Bbn6HLVUAkRQeiTGIG0swX46WA2Jg1pL923fn8WXlh/GADwxNjuuLaXewHM1Smx6Nvucjy2bj9+PnweL/5wBP/ddQYJ4UGIDNGidbAWrYN1iAx2fBwZrEXriuOtDFrY7AJyjBZkG80473Sz4LzRjByTBdmFZpSW2QA4XvAlROiREB6EhIggx8cRQYgPD0KbiCBEh+pqDWoEQYDVZoe13A5LeeWfgGOnXalUQKVQQKkEVAoF1Eql42OlAkqFAmqlgj9TfOznim5QI7v5ftp2faYNS8anu9Lxc8XAPFe7d+SelvWbgVwSBAFHz5vw+/F8bD+eh50n81FstUn3t4kIwu0DE3HbwETE1tHTvH9Sayy7sx/u/3gPPtuTgZhQPf41pqtba3j5p6P4+fB5aNVKvDu5f63Fhld0jcGGB0dg6aZjePfXk9hw6Dx+P56H+aO7YoqP0hKau7MXSvDm5uP4397KgOLyLtF4YFRnJEcFw2Qug7G03PGnuRxGcxlM5nKn4za7gLsGt/NZf/
8IgxbDOkVJU4MBR43A4SxTRbBRiAOZhTiRW4yLJWX4/Xg+fj+eL52rVSvRLS5UCjS6xoYiPjwIMWG6Fvuup8kif1eoqmlVem3L/L+mU6sweUh72OwCFn17CP/+4TA6xgTL1ojg2yrdoLx1bc84pJ0twPr9lYHF3jMX8ODaNADAlCFJtdZf1CYqRIf3JvfHZ3vO4tlvD+HshVKcvVDq9VpduVBsxYViKw5kun7XQ61UIDZML81XcdxsUiDhLZ1aibAgDcIrbmF6teNPp2MaRBg0GNQhknViHhAEAb8cadxp23URB+ZtOpKDlb+fwqLxPf29pGaLgUULlXGxBNuP5+O343nYfiIfeUUWp/sjDBoM6xiFCf3b4vIu0W6/m3B1SixeuKkXFnyxH8s2H0dMmA6Tq7xT5sr/9mbg7a0nAAD/mdC7RtvS6oK0KjxyTTeM79MGj3+5H3vPXMRz3x3CN2mZWDF1IKKYlw/A8Ry/ufk4Pt9TGVAM7xyFB6/q4pT24G2xva+E6jW4NLk1Lk2uDGbMZTYcO1+EQ1mFOHTOiENZRhw6Z0Sx1VYxm6DQxXXUiA3TIyZUJ/0ZI/4ZqkPHmJCA/Dfji+JtcfdDqXDsYLVkU4e2x9/ni7D6j3TMXZ2GL2YNRRcvdhgAIL/Igu0nHAFzQ7pBVXdtz3i8+MMR7DqVj/wiC4xmR7qptdyOq7rH4ukbejQojUuhUGDiwHa4qnssDmeZkF9swYViKy4WW5FfEQyIf14otuJiiRXiIG+dWom4cD1iwypuoTrEhesRU/XjUD2s5XacKyzFuYKKW6G58uMCM7KNZpTbBWQWuBfUaNVK6Cr+zZbbBdgEAXa74LR7W52l3C51GKtPqE6NKUPbY9plyV7/TBUEATtO5OOng9kwmcthKbfDXGaDudwGS5kd5nIbzGV2mC7m13jsC98fwsDu7dEjIRzd4kKb7BsrJ3KLcaZi2vZlnRtn2nZ9ZnBgniwYWLQARZZy/F3RjvGvzEJsP56H0/klTufoNUpcmhyJYR0jMaxTFFLiwxrct/2OS9shx2jBaz//jWe+OYioEJ00OK263acvYMEXjg5Q/7yyk0et3rrGheLz+4dg7Z6zeHH9YezLKMSd7+3EJzMGIzo08F4ouiuzoBTLNh3H//aelYZdOQKKzk1iorA39BoVerUNR6+2lZ3B7HYBZy+WOAUax3OLcN5odvzyNZfDZC7C8Zwil9fUqBR46ZbeuLlf28b6NhqF2G5WzuJtMUgxaNUtvs5FoVBg0bgeOJlbhF2nLmD6h7vx9ezLvHpR+cOBbNjsAnq2CZMlFaNdpAE9EsJw8JwRa/ecxWe7z+JiSRl6tw3HG3f08Tr9JDJEh8s61/+z1mYXUFBihVqpRFiQe/92grQqhBs0tRaI2+wCckxmnCsww2YXoFMrpenwOo0KWpUSOo0SWpXjVtfvM3tFoGGzV9wEATabgCJLOQpLy2AsLYPRXFbxccUx6fMynMorxun8EizbfBwrfjuFuwe3w73DO3g8tbzIUo4v/szARzvO1PrzyunvoMRc49i6PzPx1RHHY1VKBTpGB6NnQnhl+mhCWJPYWfHXtO26DOkYie7xYTicZeTAPC80jWeTZGEpt+FEjqOv+5Fsk6O/e7bJ5Ts6KqUCl7QNx2WdojC0UxT6touQtX3g3FGdcN5kxqe70vHgmjS0DtbW6H9+9kKJNOn12p5xmHdVF4+/jlKpwB2XtsOg5Na4472d+Pt8Ee54byc+vXcQYkKb/1C9E7lFeOH7w8g1WaBWKaCp+CWpqfhYU/VjtRJF5nL8cCBLCigu6+QIKMRahkCkVCqQFBmMpMhgp1xxsQg0x2hGjtGCHFNlHrf457mCUmRcLMVDn+9DuU3AbQMT/fidyEcQhCrF2/LPsWiq74I2Nq1aibfv7o/xb/6O9AslmPnfvfjv9EEed0sDgN+O5WHJz38DkGe3QjS2VzwOnjPi5R+PAnA03lgxZWCDO/c1hEqpkL
3Dm0qpQHy4o97CW0qlAkooUP2fdatgLdz5iWC3C9h4+DyWbTqO/ZmFeG/bKXy44wzuGJiI+0Z0rHOOCAAczzHhox1nsG5vhpSGHKxVYXzfNmgfaYBOrYJe4yhmFwMnvVqFEuMFXLXU+VpThrbHmWIVDp0zIr/Yir/PF+Hv80X4IjVTOqdNRBCiQnVobdCgVbAWrQ1ax58VtTCOWhkNWhm0CA/S+CTF2F/TtuuiUCgw/bJk/OvzfVi1/RSmX5bcoP/LLZ1CEITa9wFbKKPRiPDwcBQWFiIsrGm10xMEAbkmC07nl+B0fjHO5BfjVF4xjmabcDq/BLZatnVjw3ToGheGbnGhGFSRXiJnUacrNruAf/x3LzYcOo9QvRqfzxyCbnGOv0+TuQy3vLUdf58vQs82Yfjs/iFe/6I7nVeMO97biaxCMzpEB2P1vYPrrAlp6n4+dB7z1qZJufKeGNYpEg9e1QUDAzigkIPdLuCZbw7i451nAADP3dgTkwYn+XlVDScIAkrLbDhvtGDkK1sAAAcXjZGtNfOWozmYunI32rU24NdHRspyzUBw7LwJNy/fDpOlHLcPTMSLN/dye0en3GbHkp+P4c0txyEIQNfYUHx2/xDZ0jBO5BZh1P9tBeDYvfpi1lB0ivEuZYtcEwQBW//OxdJNx7G3oouiRqXALf3a4h9XdERSZOUuVLnNjp8P5+CjHael9DcA6BgdjClD2+Omvm3q/R2dm5uLmBjn+oScnBxER0dDEAScN1pwILMQB88ZcfCc4093U8eqUiocQXTlG1vKis8V0sdalRIhejW6xoaie3wYuseHoUN0sMtJ1heLrej//EbYBeC3R0f6ZTBebSzlNlz20mbkmixYMrGPbM1KmjtPXhczsHDB34GF3S4gx2SpEjiU4Ey+Y6v1TH4xSqoUVlcXplejW1wYusSFoGuco4i1S2wIIgz+yaM3l9kwacUu7D59EbFhOnwxaxjiwvSY8eFubD6ai5hQHb6ZcxniwuUJANLzHUOcMgtKkRzlCC7kunZjsdsFLN10HK9VvHt5afvWuH9EB9jsAspsAspsdlhtdpRXfOy4OT622QUM6xTlVJdAdRMEAc9/fxgrfjsFAHj6+hRM87CgtTEIgoBfj+Xhr7MFKCgtQ0GJIxWjsNSKgpIyFJSWobCkDFZbZdGqUgGc+PdY2dKWzhvNuOI/WzCqewyW3dlPlmsGis1HczB91W7YBeCp61PcKorOKizF3NWp2H3a8SL0jkvb4ZkbUmTfEbrt7R34K7MAq+65lJOzG4EgCNhxMh/LNh2XggalAhjfpw3uHpyEnSfz8cnOMzhXaJbuuzolFpOHtMfQjpFu/3+tK7CoTUGJFSdyi3ChuAwXi624UOKojRFrYRx/luFCsVXqCNlQWrUSXWJD0D3OEWikJIShe1wYNh/NwYNr09AtLhQ/Pni5V1/DF5ZtOoZXNvyNnm3C8O2cy1p82ifAwMJr/g4sFn5zEKu2n671fqUCaNMqCO0jg5EUaUD7yGB0iQ1F17hQxITqmtx/gsKSMtz6jmN3omN0MAZ1iMSnu9KhUyvx+cwh6N02Qtavd/ZCCW5/1xFcJEUasPrewc2mHa3JXIaHPtuHDRXtVScPScKT16VwO9bHBEHAyz8dxVtbHE0EFlzrGIzWVGQVluKprw5Ks0Tqo1EpEB6kwfg+bfDU9SmyrqXIUg6DRtXgGqxA9v62k3j++8NQKoAVUwdiZNfau91sOnIeD322DxdLyhCiU+PfN/fCuEvkS4GqylJuQ6nV5rc3mFqyvWcuYNmm49h8NLfGfa2Dtbh9YCLuGpxUb7qUK3l5eejevbvTscOHDyMqKqqWR3im3OaoURPfzCqzCbCW2ys/L698U8tSbseFYisOZxlxOMuII9kmqc6rOrGL1+yRHfHwmG6yrFVOF4qtGLr4F5jL7Fh732DOtQADC6/5O7BY8dsp/Hv9YbStCB7aRxqQFBmM9lGOPxNbGZrdC81zBa
W45a3tyCqsLDZbdmdfWXOJq8q46Ni5OHuhFO1aG7D6vsEN+sHdmE7mFuG+j/fieE4RtColnr+xZ8Dk/DcHgiBgyc/H8PovxwAAD13dBf8c1dmva7LbBfx31xm8/ONRFFnKoVEpcH3vBMSE6RARpEWEQYOIIA3CDY7WlxEGLSKCNDBoVU3uDYaWQBAEPLruL3y2JwOhOjW+nF0z7chabsd/fjqC97Y5dsh6tgnDsjv6sW9+gNufUYg3Nx/HhkPZ6NU2AlOGJGFsr/iArVcSm2oczjLiUJbJ8WeVVCyFAvh2zmXo2Sa8niv5xxNf7scnu9JxdUos3ps8wN/L8TsGFl7yd2BhLrNBpVS4zE1szv4+b8KEt7bDaC7HvKu64IGrfPuiLbOgFHe+txNn8kvQtlUQVt87GImtm04uZ1WbjpzHA6sd9RSxYTq8fXd/9K2n7S75hrgNDgBzr+yEeVd38cuL9L/Pm/DYur/wZ3oBAKBfuwgsvqW31y1Nybes5Xbc/f4u/HH6ApIiDfhq1jC0qugUdfZCCeasTsW+swUAHC1rF4ztJmvjDGraBEFo0UF/YWkZjmQZoVUrm/TvOLE2SaEANj10BZJbeODPwMJL/g4sAtnZCyU4nlOEK7pGN8oP16zCUtz53i6cyitGmwhHcNEusukEF3a7gDc3H8erP/8NQQAGJLXC8rv7BURHq+bs3V9P4N/rjwAA7h/RAY9d063RXgxYym14c9NxvLX1BMpsAoK1Kjx6bTfcPSiJ6UfNRH6RBePf/B0ZF0sxuENrfDx9EH4+dB6PrPsLJnM5wvRq/OfWSzCmR5y/l0pEtZi2ajc2HcnB5CFJeLaFD8xjYOElBhaB5bzRjDve3YmTecVICNdj9X2Dnbpz+EuRpRz/+mwffjyYDQC4a1A7PHNDj2aX5haoVv5+Cou+PQQAmDYsGU9d393nwcXu0xfw2Lq/cCK3GIBjIu2z43s2mxohqnQ024Sbl/+OYqsN3eJCcSTbBADo2y4CS+/o26Q64RBRTduP5+HO93chSKPCjgVXtugaJU9eF/v9Fczy5cuRnJwMvV6P/v37Y9u2bXWev3XrVvTv3x96vR4dOnTA22+/XeOcdevWISUlBTqdDikpKfjyyy99tXxqBmLD9Fhz32B0jA7GuUIzJr6zE4fOGWEyl8FSbkNjxtaC4Ch0O55ThJve/B0/HsyGVqXE4pt74YWbejGoaELuGZaMF25yvEv1we+n8PTXB2GvY0qvN4zmMjz+5X7c+vYOnMgtRlSIDm/e2Q/vTR7AoKKZ6hoXijfu6AuFAlJQMXNER3x2/xAGFUTNgDgwr7TMhve3nUJekQVlVbrukWt+3bFYu3YtJk2ahOXLl2PYsGF455138P777+PQoUNo165djfNPnTqFnj174t5778X999+P33//HbNmzcLq1atxyy23AAB27NiB4cOH47nnnsNNN92EL7/8Ek8//TR+++03DBo0yK11ccciMOWaLLjzvZ045mKiqUalcPTnrujHra3yp7LiXeqq/1Fc/bcRBMAmCCiv0v613F7xZ5XPq4oJ1eHtSf3RrwnnmrZ0n+05i0fX/QVBAMb0iEXXilksCjgKEB0fK6p8XOW4Gzsc1nI7Vv+RjhyTBQBw+8BELLi2u2xzDMi/1vyRjrV7zmLuqM51dokioqZn3d4MPPT5PqdjwVoVwoM0CDdoER6kRkSQtuJzRxONML0awTrHLVRX+XGITo0QvbpZdtVrNqlQgwYNQr9+/fDWW29Jx7p3744bb7wRL774Yo3zH330UXzzzTc4fPiwdGzmzJnYt28fduzYAQCYOHEijEYjfvjhB+mca665Bq1atcLq1avdWhcDi8CVV2TBP/67V+ob70+Dkltj6R19EdOMh/i1FF+mZuChz/bBRxsWAIDkqGD8+6ZeGNKRrQ2JqH6lpaX44IMPnI5NmzYNQUHc5ZSLtdyOGR/tQWr6RZjMng+rrU2wVuUIPPRqhOgdwUioXo
1QnQYh4sd6DUL1aoTp1QjRaRAVqpWGDDc2T14XyzOOtQGsViv27t2Lxx57zOn46NGjsX37dpeP2bFjB0aPHu10bMyYMVixYgXKysqg0WiwY8cOzJs3r8Y5S5YsqXUtFosFFotF+txoNHr43VBzERWiw+czh8JuF2Ct0ovbarNL/bkt5eLHjp7d9iqxd9U3oBVQ1DiuVCigUSmgVimhViqgVTv+1KiUUKsUUCsrp5XKNQ2ZfO+mvm0RG6bHTweyYRcAoWL/ShAqd7Ic/0wEiP9cPHnLpkPFtN1AbT1JRPIrKirCnDlznI7ddtttDCxkpFUr8dG0SwEANrsAY6k4mLRiKKl4K3EMFCwoKUORpVy6FVvKUWyxSZ/bKt6dKrbaUGy1STvV7ujZJgzf/XO4T75POfntlU1eXh5sNhtiY2OdjsfGxiI7O9vlY7Kzs12eX15ejry8PMTHx9d6Tm3XBIAXX3wRixYtauB3Qs2RUqmAXqniCzly29COURjaUZ7BU0RE1LyolAq0CtZK7aM9JQgCLOV2KeAwmR3BhslcDpO5TPrYaC6rOFaOoiofJ0eFyPwd+Ybf3zKtnoNcX49nV+dXP+7pNRcsWID58+dLnxuNRiQmcjAZEREREXlPoVBAr3G8oRkVovP3cnzGb4FFVFQUVCpVjZ2EnJycGjsOori4OJfnq9VqREZG1nlObdcEAJ1OB50ucJ9kIiIiIiJf81tvS61Wi/79+2Pjxo1Oxzdu3IihQ4e6fMyQIUNqnL9hwwYMGDAAGo2mznNquyYREREREXnPr6lQ8+fPx6RJkzBgwAAMGTIE7777LtLT0zFz5kwAjhSlzMxMfPTRRwAcHaCWLVuG+fPn495778WOHTuwYsUKp25PDzzwAC6//HK89NJLGD9+PL7++mv8/PPP+O233/zyPRIRERERtQR+DSwmTpyI/Px8PPvss8jKykLPnj2xfv16JCUlAQCysrKQnp4unZ+cnIz169dj3rx5ePPNN5GQkIA33nhDmmEBAEOHDsWaNWvw5JNP4qmnnkLHjh2xdu1at2dYEBERERGR5/w6x6Kp4hwLIiIiaspyc3MRE+M8dDEnJwfR0dF+WhEFKk9eF/utxoKIiIiIiAIHAwsiIiIiIvIaAwsiIiIiIvIaAwsiIiIiIvIaAwsiIiIiIvIaAwsiIiIiIvKaX+dYNFViB16j0ejnlRARERHVZDKZXB7T6XR+WA0FMvH1sDsTKhhYuCD+Z01MTPTzSoiIiIjc07FjR38vgQKYyWRCeHh4nedwQJ4Ldrsd586dQ2hoKBQKRZ3nGo1GJCYm4uzZsxym10zwOWt++Jw1T3zemh8+Z80Pn7PmqTk9b4IgwGQyISEhAUpl3VUU3LFwQalUom3bth49JiwsrMn/wyBnfM6aHz5nzROft+aHz1nzw+eseWouz1t9OxUiFm8TEREREZHXGFgQEREREZHXGFh4SafT4ZlnnmEXhmaEz1nzw+eseeLz1vzwOWt++Jw1T4H6vLF4m4iIiIiIvMYdCyIiIiIi8hoDCyIiIiIi8hoDCyIiIiIi8hoDiwZq3749FAqF0+2xxx5zOic9PR033HADgoODERUVhblz58JqtfppxVSVxWJBnz59oFAokJaW5nQfn7emZdy4cWjXrh30ej3i4+MxadIknDt3zukcPmdNy+nTpzF9+nQkJycjKCgIHTt2xDPPPFPjOeHz1rS88MILGDp0KAwGAyIiIlyew+es6Vm+fDmSk5Oh1+vRv39/bNu2zd9Logq//vorbrjhBiQkJEChUOCrr75yul8QBCxcuBAJCQkICgrCFVdcgYMHD/pnsTJhYOGFZ599FllZWdLtySeflO6z2Wy47rrrUFxcjN9++w1r1qzBunXr8NBDD/lxxSR65JFHkJCQUOM4n7emZ+TIkfjss89w9OhRrFu3DidOnMCECROk+/mcNT1HjhyB3W7HO++8g4MHD+K1117D22+/jccff1w6h89b02O1Wn
HrrbfiH//4h8v7+Zw1PWvXrsWDDz6IJ554AqmpqRg+fDiuvfZapKen+3tpBKC4uBiXXHIJli1b5vL+l19+Ga+++iqWLVuG3bt3Iy4uDldffTVMJlMjr1RGAjVIUlKS8Nprr9V6//r16wWlUilkZmZKx1avXi3odDqhsLCwEVZItVm/fr3QrVs34eDBgwIAITU11ek+Pm9N29dffy0oFArBarUKgsDnrLl4+eWXheTkZOlzPm9N18qVK4Xw8PAax/mcNT2XXnqpMHPmTKdj3bp1Ex577DE/rYhqA0D48ssvpc/tdrsQFxcnLF68WDpmNpuF8PBw4e233/bDCuXBHQsvvPTSS4iMjESfPn3wwgsvOG0H79ixAz179nR6V3zMmDGwWCzYu3evP5ZLAM6fP497770XH3/8MQwGQ437+bw1bRcuXMAnn3yCoUOHQqPRAOBz1lwUFhaidevW0ud83pofPmdNi9Vqxd69ezF69Gin46NHj8b27dv9tCpy16lTp5Cdne30/Ol0OowYMaJZP38MLBrogQcewJo1a7B582bMmTMHS5YswaxZs6T7s7OzERsb6/SYVq1aQavVIjs7u7GXS3DkMk6dOhUzZ87EgAEDXJ7D561pevTRRxEcHIzIyEikp6fj66+/lu7jc9b0nThxAkuXLsXMmTOlY3zemh8+Z01LXl4ebDZbjeckNjaWz0czID5Hgfb8MbCoYuHChTUKsqvf9uzZAwCYN28eRowYgd69e2PGjBl4++23sWLFCuTn50vXUygUNb6GIAguj1PDufu8LV26FEajEQsWLKjzenzefM+T/2sA8PDDDyM1NRUbNmyASqXC5MmTIVSZ7cnnrHF4+rwBwLlz53DNNdfg1ltvxYwZM5zu4/Pmew15zurC56zpqf53z+ejeQm050/t7wU0JXPmzMHtt99e5znt27d3eXzw4MEAgOPHjyMyMhJxcXHYtWuX0zkXL15EWVlZjeiUvOPu8/b8889j586d0Ol0TvcNGDAAd911Fz788EM+b43E0/9rUVFRiIqKQpcuXdC9e3ckJiZi586dGDJkCJ+zRuTp83bu3DmMHDkSQ4YMwbvvvut0Hp+3xuHN77Xq+Jw1LVFRUVCpVDXe3c7JyeHz0QzExcUBcOxcxMfHS8eb+/PHwKIK8cVLQ6SmpgKA9I9jyJAheOGFF5CVlSUd27BhA3Q6Hfr37y/PggmA+8/bG2+8geeff176/Ny5cxgzZgzWrl2LQYMGAeDz1li8+b8m7lRYLBYAfM4akyfPW2ZmJkaOHIn+/ftj5cqVUCqdN8j5vDUOb/6vVcfnrGnRarXo378/Nm7ciJtuukk6vnHjRowfP96PKyN3JCcnIy4uDhs3bkTfvn0BOOpmtm7dipdeesnPq/OCv6rGm7Pt27cLr776qpCamiqcPHlSWLt2rZCQkCCMGzdOOqe8vFzo2bOnMGrUKOHPP/8Ufv75Z6Ft27bCnDlz/LhyqurUqVM1ukLxeWtadu3aJSxdulRITU0VTp8+LWzatEm47LLLhI4dOwpms1kQBD5nTVFmZqbQqVMn4corrxQyMjKErKws6Sbi89b0nDlzRkhNTRUWLVokhISECKmpqUJqaqpgMpkEQeBz1hStWbNG0Gg0wooVK4RDhw4JDz74oBAcHCycPn3a30sjQRBMJpP0/wiA9NrxzJkzgiAIwuLFi4Xw8HDhiy++EPbv3y/ccccdQnx8vGA0Gv288oZjYNEAe/fuFQYNGiSEh4cLer1e6Nq1q/DMM88IxcXFTuedOXNGuO6664SgoCChdevWwpw5c6QXQ+R/rgILQeDz1pT89ddfwsiRI4XWrVsLOp1OaN++vTBz5kwhIyPD6Tw+Z03LypUrBQAub1XxeWtapkyZ4vI527x5s3QOn7Om58033xSSkpIErVYr9OvXT9i6dau/l0QVNm/e7PL/1JQpUwRBcLScfeaZZ4S4uDhBp9MJl19+ubB//37/LtpLCkGoUgFJRE
RERETUAOwKRUREREREXmNgQUREREREXmNgQUREREREXmNgQUREREREXmNgQUREREREXmNgQUREREREXmNgQUREREREXmNgQUREREREXmNgQUREjW7Lli1QKBQoKCjw91KIiEgmnLxNREQ+d8UVV6BPnz5YsmQJAMBqteLChQuIjY2FQqHw7+KIiEgWan8vgIiIWh6tVou4uDh/L4OIiGTEVCgiIvKpqVOnYuvWrXj99dehUCigUCiwatUqp1SoVatWISIiAt999x26du0Kg8GACRMmoLi4GB9++CHat2+PVq1a4Z///CdsNpt0bavVikceeQRt2rRBcHAwBg0ahC1btvjnGyUiauG4Y0FERD71+uuv4++//0bPnj3x7LPPAgAOHjxY47ySkhK88cYbWLNmDUwmE26++WbcfPPNiIiIwPr163Hy5EnccsstuOyyyzBx4kQAwD333IPTp09jzZo1SEhIwJdffolrrrkG+/fvR+fOnRv1+yQiaukYWBARkU+Fh4dDq9XCYDBI6U9HjhypcV5ZWRneeustdOzYEQAwYcIEfPzxxzh//jxCQkKQkpKCkSNHYvPmzZg4cSJOnDiB1atXIyMjAwkJCQCAf/3rX/jxxx+xcuVK/Pvf/268b5KIiBhYEBFR02AwGKSgAgBiY2PRvn17hISEOB3LyckBAPz5558QBAFdunRxuo7FYkFkZGTjLJqIiCQMLIiIqEnQaDROnysUCpfH7HY7AMBut0OlUmHv3r1QqVRO51UNRoiIqHEwsCAiIp/TarVORddy6Nu3L2w2G3JycjB8+HBZr01ERJ5jVygiIvK59u3bY9euXTh9+jTy8vKkXQdvdOnSBXfddRcmT56ML774AqdOncLu3bvx0ksvYf369TKsmoiIPMHAgoiIfO5f//oXVCoVUlJSEB0djfT0dFmuu3LlSkyePBkPPfQQunbtinHjxmHXrl1ITEyU5fpEROQ+Tt4mIiIiIiKvcceCiIiIiIi8xsCCiIiIiIi8xsCCiIiIiIi8xsCCiIiIiIi8xsCCiIiIiIi8xsCCiIiIiIi8xsCCiIiIiIi8xsCCiIiIiIi8xsCCiIiIiIi8xsCCiIiIiIi8xsCCiIiIiIi8xsCCiIiIiIi89v+BNENoQEQMrQAAAABJRU5ErkJggg==", - "text/plain": [ - "
" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], + "outputs": [], "source": [ "#| eval: false\n", "plot_attention(nf.models[0], plot=\"time\")" @@ -1884,18 +1268,7 @@ "cell_type": "code", "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAxYAAAGGCAYAAADmRxfNAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8fJSN1AAAACXBIWXMAAA9hAAAPYQGoP6dpAAEAAElEQVR4nOzdeXwV1fn48c/M3CV7QkggQQICIhIkgkBVqCw/QShagWqDStGIdcNWECstbZWALSpSCqKi/VIE+VZFCyJ0UQMCypdFqlK0oBWKDWogkJA9d5s5vz/m5oabBRMSCCTP+/Wayp2ZnDkT6L33meec82hKKYUQQgghhBBCNIHe0h0QQgghhBBCnP8ksBBCCCGEEEI0mQQWQgghhBBCiCaTwEIIIYQQQgjRZBJYCCGEEEIIIZpMAgshhBBCCCFEk0lgIYQQQgghhGgyCSyEEEIIIYQQTSaBhRBCCCGEEKLJJLAQQohm9PTTT6NpGpdeemmdx/ft20d2djZffvllrWMvv/wyixYtOrMdbEA/srKyuPDCC89KP841X375JZqmsWDBgrNyvezsbDRNOyvXEkKIM00CCyGEaEbLly8H4F//+he7du2qdXzfvn3MmTPnnAgs6uvHI488whtvvHFW+tHW/fjHP2bHjh0t3Q0hhGgWElgIIUQz+cc//sE///lPrrvuOgD++Mc/tnCPTk+PHj3o379/S3fjjPH7/QQCgRbtQ0VFBQCdO3fmyiuvbNG+CCFEc5HAQgghmklVIPHEE08wePBgXn311dAXSIAVK1bwwx/+EIARI0agaRqaprFixQqGDx/OX//6V/773/+G9p88RMbn8/Gb3/yGSy65BLfbTXJyMnfccQfHjh0L68OFF17I9ddfz1tvvcXll19OZGQkl1xySSiT8m39gLqHQnk8HmbNmkW3bt1wuVxccMEF3H///RQVFTX6+vWpGoY0f/58fvvb39KlSxciIiIYOHAgmzZtqnX+F198wa233kqHDh1wu9307t2bZ599NuycLVu2oGkaq1at4qGHHuKCCy7A7XZz4MCBb+3PwoUL6datGzExMVx11VXs3Lmz1jnr16/nqquuIioqitjYWEaNGlUrA1E13Omjjz7ipptuol27dvTo0SPsWJUVK1aE/f2fvA0fPjx03tn4+xBCiEZTQgghmqyiokLFx8erQYMGKaWUWrZsmQLUihUrQufk5+erefPmKUA9++yzaseOHWrHjh0qPz9f/etf/1JDhgxRKSkpof07duxQSillmqYaM2aMio6OVnPmzFE5OTlq2bJl6oILLlDp6emqoqIidI2uXbuqzp07q/T0dPXSSy+pt99+W/3whz9UgNq6deu39kMppW6//XbVtWvXUJuWZanRo0crh8OhHnnkEfXOO++oBQsWqOjoaNW/f3/l8Xgadf36HDp0SAEqLS1Nffe731Vr1qxRr7/+uho0aJByOp1q+/btoXP/9a9/qfj4eNW3b1/10ksvqXfeeUc99NBDStd1lZ2dHTpv8+bNClAXXHCBuummm9T69evVX/7yF1VQUHDKPlx44YVqzJgxat26dWrdunWqb9++ql27dqqoqCh07p/+9CcFqGuvvVatW7dOrV69Wg0YMEC5XC71/vvvh86bPXu2AlTXrl3Vz3/+c5WTk6PWrVsXduzkfyMn//3v2LFDLVy4UAFq6tSpZ/XvQwg
hGksCCyGEaAYvvfSSAtTzzz+vlFKqtLRUxcTEqKuvvjrsvNdff10BavPmzbXauO6668K+0Fd55ZVXFKDWrFkTtn/37t0KUM8991xoX9euXVVERIT673//G9pXWVmpEhMT1T333NOgftQMLN566y0FqPnz54edt3r1agWoP/zhD42+fl2qvtR36tRJVVZWhvaXlJSoxMRENXLkyNC+0aNHq86dO6vi4uKwNn7yk5+oiIgIVVhYqJSqDiyGDh16ymvX7EPfvn1VIBAI7f/ggw8UoF555RWllB3sderUSfXt21eZphk6r7S0VHXo0EENHjw4tK8qeHj00UdrXa9mYFHTZ599ptq3b69GjBihvF6vUurs/X0IIURjyVAoIYRoBn/84x+JjIzk5ptvBiAmJoYf/vCHvP/++3zxxRdNavsvf/kLCQkJfP/73ycQCIS2fv36kZKSwpYtW8LO79evH126dAm9joiI4OKLL+a///3vaV3/3XffBewhUif74Q9/SHR0dK1hSk29/g9+8AMiIiJCr2NjY/n+97/Pe++9h2maeDweNm3axIQJE4iKigr7nYwdOxaPx1Nr2NKNN97YmFvmuuuuwzCM0OuMjAyA0D18/vnnfPPNN0yePBldr/4ojYmJ4cYbb2Tnzp1hw+BOpw9HjhxhzJgxpKam8sYbb+ByuYCz//chhBANJYGFEEI00YEDB3jvvfe47rrrUEpRVFREUVERN910E0CTx7MfPXqUoqIiXC4XTqczbDty5AjHjx8PO799+/a12nC73VRWVp7W9QsKCnA4HCQnJ4ft1zSNlJQUCgoKmvX6KSkpde7z+XyUlZVRUFBAIBBgyZIltX4fY8eOBaj1O0lNTW3Qteu7B7fbDRC6h6p7rqvdTp06YVkWJ06cOO0+lJaWMnbsWPx+P3//+9+Jj48PHTvbfx9CCNFQjpbugBBCnO+WL1+OUoo///nP/PnPf651fOXKlfzmN78JewLeGElJSbRv35633nqrzuOxsbGn1W5DtW/fnkAgwLFjx8K+zCqlOHLkCIMGDWrW6x05cqTOfS6Xi5iYGJxOJ4ZhMHnyZO6///462+jWrVvY6+auFVH1ZT0vL6/WsW+++QZd12nXrt1p9cHv93PjjTdy8OBB3n//fTp37lzr2mfz70MIIRpKMhZCCNEEpmmycuVKevTowebNm2ttDz30EHl5efz9738Haj/5Pll9T5Gvv/56CgoKME2TgQMH1tp69erV6H6fqh81XXPNNQD87//+b9j+NWvWUF5eHjreXNauXYvH4wm9Li0tZcOGDVx99dUYhkFUVBQjRozg448/JiMjo87fSV1P6ZtTr169uOCCC3j55ZdRSoX2l5eXs2bNmtBKUafjzjvvZMuWLaxduzY0BOtkZ/vvQwghGkoyFkII0QR///vf+eabb3jyySfDlgOtcumll/LMM8/wxz/+keuvvz5UkfsPf/gDsbGxRERE0K1bN9q3b0/fvn1Zu3YtS5cuZcCAAei6zsCBA7n55pv505/+xNixY5k2bRrf+c53cDqdfPXVV2zevJlx48YxYcKERvX7VP2oadSoUYwePZqf//znlJSUMGTIEPbu3cvs2bPp378/kydPbvwv7hQMw2DUqFHMmDEDy7J48sknKSkpYc6cOaFzFi9ezHe/+12uvvpq7rvvPi688EJKS0s5cOAAGzZsCM1DOFN0XWf+/PlMmjSJ66+/nnvuuQev18tTTz1FUVERTzzxxGm1+9RTT7Fq1Sp++tOfEh0dHTZXJC4ujvT09LP+9yGEEA0lgYUQQjTBH//4R1wuF3fccUedx5OSkpgwYQJ//vOfOXr0KN26dWPRokUsXryY4cOHY5omL774IllZWUybNo1//etf/PKXv6S4uBhlr9yHYRisX7+exYsXs2rVKh5//HEcDgedO3dm2LBh9O3bt9H9PlU/atI0jXXr1pGdnc2LL77Ib3/7W5KSkpg8eTLz5s0LZT+ay09+8hM8Hg8
PPPAA+fn59OnTh7/+9a8MGTIkdE56ejofffQRjz32GL/+9a/Jz88nISGBnj17huZZnGm33nor0dHRPP7440ycOBHDMLjyyivZvHkzgwcPPq02//WvfwGwZMkSlixZEnZs2LBhobocZ/PvQwghGkpTJ+dwhRBCiBby5Zdf0q1bN5566il+9rOftXR3hBBCNJLMsRBCCCGEEEI0mQQWQgghhBBCiCaToVBCCCGEEEKIJpOMhRBCCCGEEKLJJLAQQgghhBBCNJkEFkIIIYQQQogmkzoWdbAsi2+++YbY2Fg0TWvp7gghhBBCCNEilFKUlpbSqVMndP3UOQkJLOrwzTffkJaW1tLdEEIIIYQQ4pxw+PBhOnfufMpzJLCoQ2xsLGD/AuPi4lq4N0IIIYQQQrSMkpIS0tLSQt+PT0UCizpUDX+Ki4uTwEIIIYQQQrR5DZkeIJO3hRBCCCGEEE0mGQshhBBCiPNMeXk5Tz31VNi+hx9+mOjo6BbqkRBSebtOJSUlxMfHU1xcLEOhhBBCCHHOOXbsGB06dAjbl5+fT3Jycgv1SLRWjfleLBkLIYQQQohzlGma+P3+Wvv9fj9du3attc/j8ZytrolWwul0YhhGs7QlgYUQQgghxDlGKcWRI0coKiqq87hpmjz//PNh+woLCykuLj4LvROtTUJCAikpKU2u3yaBhRBCCCHEOaYqqOjQoQNRUVG1vvDVlZ3o2rUrTqfzbHZTnOeUUlRUVJCfnw9Aampqk9qTwEIIIYQQ4hximmYoqGjfvn2d59Q1dCUiIkICC9FokZGRgD1Hp0OHDk0aFiXLzQohhBBCnEOq5lRERUW1cE9EW1H1b62u+TyNIYGFEEIIIcQ5qKnj3YVoqOb6tyaBhRBCCCGEEKLJJLAQQgjR7HyeAG//z6cc/Ci/pbsihDiLhg8fzvTp08+7tkXzkMBCCCFEs/vqsxMc+DCfj3NyW7orQohWYu3atTz22GMt3ofRo0eTlJSEpmns2bOnRftzrpHAQgghRLPze82w/wohxOmqmlCcmJhIbGxsi/alvLycIUOG8MQTT7RoP85VElgIIYRodgGfBBZCtFWWZTFz5kwSExNJSUkhOzs77Hhubi7jxo0jJiaGuLg4MjMzOXr0aOh4dnY2/fr1Y/ny5XTv3h23241SKmwo1JYtW9A0rdaWlZUVamfp0qX06NEDl8tFr169WLVqVVg/NE1j2bJlTJgwgaioKHr27Mn69etPeW+TJ0/m0UcfZeTIkU36HbVWElgIIYRodgGfFfyvBBZCNAelFBW+QNjmCaiwrebx5tqUUo3q68qVK4mOjmbXrl3Mnz+fuXPnkpOTE7qP8ePHU1hYyNatW8nJyeHgwYNMnDgxrI0DBw7w2muvsWbNmjqHGw0ePJi8vLzQ9u677xIREcHQoUMBeOONN5g2bRoPPfQQn376Kffccw933HEHmzdvDmtnzpw5ZGZmsnfvXsaOHcukSZMoLCxs1P2KalIgTwghRLMLDYUKBhhCiKap9JukP/r2qU9a9+4Zufa+uaOJcjX8K2NGRgazZ88GoGfPnjzzzDNs2rSJUaNGsXHjRvbu3cuhQ4dIS0sDYNWqVfTp04fdu3czaNAgAHw+H6tWrSI5ObnOa7hcLlJSUgAoKCjgrrvuYsqUKUyZMgWABQsWkJWVxdSpUwGYMWMGO3fuZMGCBYwYMSLUTlZWFrfccgsA8+bNY8mSJXzwwQeMGTOmMb8iESQZCyGEEM2uKlMR8JmNftophDi/ZWRkhL1OTU0lP99eIW7//v2kpaWFggqA9PR0EhIS2L9/f2hf165d6w0qTub3+7nxxhvp0qULixcvDu3fv38/Q4YMCTt3yJAhYdeo2dfo6GhiY2NDfRWNJxkLIYQQza5qKBQKAn4Lp8to2Q4JcZ6LdBrsmzu6xa7dGE6nM+y1pmlYlv2eoJSqsxhbzf3R0dENutZ9991Hbm4uu3fvxuEI/1pb8zp1XftUfRWN1+IZi+e
ee45u3boRERHBgAEDeP/99+s9d9u2bQwZMoT27dsTGRnJJZdcwu9///ta561Zs4b09HTcbjfp6em88cYbZ/IWhBBC1OA/aW6FzLMQouk0TSPK5WiRrTkrgKenp5Obm8vhw4dD+/bt20dxcTG9e/duVFsLFy5k9erVrF+/nvbt24cd6927N9u2bQvbt3379kZfQzROi2YsVq9ezfTp03nuuecYMmQIL7zwAt/73vfYt28fXbp0qXV+dHQ0P/nJT8jIyCA6Oppt27Zxzz33EB0dzd133w3Ajh07mDhxIo899hgTJkzgjTfeIDMzk23btnHFFVec7VsUQog26eRgwu81iYxpwc4IIc4ZI0eOJCMjg0mTJrFo0SICgQBTp05l2LBhDBw4sMHtbNy4kZkzZ/Lss8+SlJTEkSNHAIiMjCQ+Pp6HH36YzMxMLr/8cq655ho2bNjA2rVr2bhxY5P6X1hYSG5uLt988w0An3/+OQApKSmhOR9tWYtmLBYuXMidd97Jj3/8Y3r37s2iRYtIS0tj6dKldZ7fv39/brnlFvr06cOFF17Ij370I0aPHh2W5Vi0aBGjRo1i1qxZXHLJJcyaNYtrrrmGRYsWnaW7EkIIEZ6xkGEFQgibpmmsW7eOdu3aMXToUEaOHEn37t1ZvXp1o9rZtm0bpmly7733kpqaGtqmTZsGwPjx41m8eDFPPfUUffr04YUXXuDFF19k+PDhTer/+vXr6d+/P9dddx0AN998M/379+f5559vUruthaZaaFadz+cjKiqK119/nQkTJoT2T5s2jT179rB169ZvbePjjz/me9/7Hr/5zW/48Y9/DECXLl148MEHefDBB0Pn/f73v2fRokX897//rbMdr9eL1+sNvS4pKSEtLY3i4mLi4uJO9xaFEKLNWvXUVkoO2sHFD2cNpENXeS8VoqE8Hg+HDh0KDRUX4kw71b+5kpIS4uPjG/S9uMUyFsePH8c0TTp27Bi2v2PHjqF0Vn06d+6M2+1m4MCB3H///aGgAuDIkSONbvPxxx8nPj4+tJ28UoEQQojG83r9oT9LkTwhhGgbWnxVqIbM2K/p/fffp6ysjJ07d/KLX/yCiy66KLQG8em0OWvWLGbMmBF6XZWxEEIIcXpMn0XVsysZCiVE87Msi+PHj4ftS0pKQtdbfF0e0Ya1WGCRlJSEYRi1Mgn5+fm1Mg41devWDYC+ffty9OhRsrOzQ4FFSkpKo9t0u9243e7TuQ0hhBB1sPzVo2wlYyFE8zNNk9zc3LB97dq1k8BCtKgW+9fncrkYMGBAqMR7lZycHAYPHtzgdpRSYfMjrrrqqlptvvPOO41qUwghRNNY/uossSw3K4QQbUOLDoWaMWMGkydPZuDAgVx11VX84Q9/IDc3l3vvvRewhyh9/fXXvPTSSwA8++yzdOnShUsuuQSwVwRYsGABP/3pT0NtTps2jaFDh/Lkk08ybtw43nzzTTZu3FhrLWMhhBBnjvJDVWghGQshhGgbWjSwmDhxIgUFBcydO5e8vDwuvfRS/va3v9G1a1cA8vLywtJ8lmUxa9YsDh06hMPhoEePHjzxxBPcc889oXMGDx7Mq6++yq9//WseeeQRevTowerVq6WGhRBCnE2B6oyFx+M9xYlCCCFaixZbbvZc1phltYQQQoRTluK5qZtDr/uOSWHo+PQW7JEQ55eGLDfr9/v55z//Gbbvsssuw+l0no0uilbmvF9uVgghROsU8IevAuXx+FqoJ0IIIc4mCSyEEEI0q5qTtU+uaSGEEKL1ksBCCCFEs6o5WdsngYUQbcbw4cOZPn36ede2aB4SWAghhGhWNQvi+byBFuqJEKI1Wbt2LY899liLXd/v9/Pzn/+cvn37Eh0dTadOnbjtttv45ptvWqxP5xoJLIQQQjSrgD88YyHLzQohmsLvt7OeiYmJxMbGtlg/Kioq+Oijj3jkkUf46KOPWLt2Lf/+97+54YYbWqxP5xoJLIQQQjSrmnMs/FI
gT4g2xbIsZs6cSWJiIikpKWRnZ4cdz83NZdy4ccTExBAXF0dmZiZHjx4NHc/OzqZfv34sX76c7t2743a7UUqFDYXasmULmqbV2rKyskLtLF26lB49euByuejVqxerVq0K64emaSxbtowJEyYQFRVFz549Wb9+fb33FR8fT05ODpmZmfTq1Ysrr7ySJUuW8OGHH9aqgt5WSWAhhBCiWfm94UOhzBpDo4QQp0Ep8JWHbXqgMmyrebzZtkZWJli5ciXR0dHs2rWL+fPnM3fuXHJycoK3oRg/fjyFhYVs3bqVnJwcDh48yMSJE8PaOHDgAK+99hpr1qxhz549ta4xePBg8vLyQtu7775LREQEQ4cOBeCNN95g2rRpPPTQQ3z66afcc8893HHHHWzevDmsnTlz5pCZmcnevXsZO3YskyZNorCwsMH3WlxcjKZpJCQkNOp31Fq1aIE8IYQQrU/NjIXpl3JJQjSZvwLmdQq9dAKX1zznrTN07V9+A67oBp+ekZHB7NmzAejZsyfPPPMMmzZtYtSoUWzcuJG9e/dy6NAh0tLSAFi1ahV9+vRh9+7dDBo0CACfz8eqVatITk6u8xoul4uUlBQACgoKuOuuu5gyZQpTpkwBYMGCBWRlZTF16lQAZsyYwc6dO1mwYAEjRowItZOVlcUtt9wCwLx581iyZAkffPABY8aM+db79Hg8/OIXv+DWW2+VumdBkrEQQgjRrKoCC79eCYAli0IJ0aZkZGSEvU5NTSU/Px+A/fv3k5aWFgoqANLT00lISGD//v2hfV27dq03qDiZ3+/nxhtvpEuXLixevDi0f//+/QwZMiTs3CFDhoRdo2Zfo6OjiY2NDfX126578803Y1kWzz333Lee31ZIxkIIIUSz8njsSCKuvIzKyEiUX2vhHgnRCjij7MxBkN/v55NPPgk7pW/fvmem8rYzqnGn1+iDpmlYlj0kUimFptV+T6i5Pzq6YRmS++67j9zcXHbv3o3DEf61tuZ16rr2qfpaH7/fT2ZmJocOHeLdd9+VbMVJJLAQQgjRrDweLwBOXymVkckQkMBCiCbTtLDhSE4XXH7l1S3YodOTnp5Obm4uhw8fDmUt9u3bR3FxMb17925UWwsXLmT16tXs2LGD9u3bhx3r3bs327Zt47bbbgvt2759e6OvUVNVUPHFF1+wefPmWtdt6ySwEEII0awqvXZg4fKX2Tv8er1PKYUQbcvIkSPJyMhg0qRJLFq0iEAgwNSpUxk2bBgDBw5scDsbN25k5syZPPvssyQlJXHkyBEAIiMjiY+P5+GHHyYzM5PLL7+ca665hg0bNrB27Vo2btx42n0PBALcdNNNfPTRR/zlL3/BNM3QdRMTE3G5XKfddmshcyyEEEI0K4/HB4AzGFhoaJh+WRlKCGEPNVq3bh3t2rVj6NChjBw5ku7du7N69epGtbNt2zZM0+Tee+8lNTU1tE2bNg2A8ePHs3jxYp566in69OnDCy+8wIsvvsjw4cNPu+9fffUV69ev56uvvqJfv35h192+fftpt9uaaEo1cg2xNqCkpIT4+HiKi4tl3JwQQjTSmyt38dWOcrrkvkNul2sBuHPB1UTEnIGx30K0Qh6Ph0OHDtGtWzciIiJaujuiDTjVv7nGfC+WjIUQQohm5fcFAHAEPGjBJaGkSJ4QQrR+ElgIIYRoVj6vHUzolg/DtIdF+b0SWAghRGsnk7eFEEI0Kzs7oWGYfgzTR8AZXatonhCiaSzLoqioKGxfQkICui7PjEXLkcBCCCFEswr4LMDAsLwYlje4TwILIZqTaZr85z//Cdt32WWXSWAhWpT86xNCCNGsTJ+9ApRu+tBDQ6FkVSghhGjtJLAQQgjRrEyfvdigYflDcywkYyGEEK2fBBZCCCGalWUvCoVhejEsO7DwyeRtIYRo9SSwEEII0ayU366wrZvVq0J5vb6W7JIQQoizQAILIYQQzSsYWBhW9RyLykpvS/ZICCHEWSCBhRBCiOY
VsD9adNMfGgrl8UhgIURbMHz4cKZPn37etS2ahwQWQgghmo1lKTTL/mgxLC+GaQcUHq8EFkKIplm7di2PPfZYi/YhOzubSy65hOjoaNq1a8fIkSPZtWtXi/bpXCKBhRBCiGZz8upPxslzLDz+luqSEOI85/fb7x+JiYnExsa2aF8uvvhinnnmGT755BO2bdvGhRdeyLXXXsuxY8datF/nCgkshBBCNJuAr7pehW4F0C0JLIRoayzLYubMmSQmJpKSkkJ2dnbY8dzcXMaNG0dMTAxxcXFkZmZy9OjR0PHs7Gz69evH8uXL6d69O263G6VU2FCoLVu2oGlarS0rKyvUztKlS+nRowcul4tevXqxatWqsH5omsayZcuYMGECUVFR9OzZk/Xr15/y3m699VZGjhxJ9+7d6dOnDwsXLqSkpIS9e/c26XfWWkhgIYQQotlUZSx004uGCmUs/FLHQogmUUpR4a8IbZWBSryWN2yrDFSGndNcm1KqUX1duXIl0dHR7Nq1i/nz5zN37lxycnJC9zF+/HgKCwvZunUrOTk5HDx4kIkTJ4a1ceDAAV577TXWrFnDnj17al1j8ODB5OXlhbZ3332XiIgIhg4dCsAbb7zBtGnTeOihh/j000+55557uOOOO9i8eXNYO3PmzCEzM5O9e/cyduxYJk2aRGFhYYPu0+fz8Yc//IH4+Hguu+yyRv2OWitHS3dACCFE61EVQFQFFFVzLPxSx0KIJqkMVHLFy1ec+qT9Z+bau27dRZQzqsHnZ2RkMHv2bAB69uzJM888w6ZNmxg1ahQbN25k7969HDp0iLS0NABWrVpFnz592L17N4MGDQLsL+2rVq0iOTm5zmu4XC5SUlIAKCgo4K677mLKlClMmTIFgAULFpCVlcXUqVMBmDFjBjt37mTBggWMGDEi1E5WVha33HILAPPmzWPJkiV88MEHjBkzpt77+8tf/sLNN99MRUUFqamp5OTkkJSU1ODfT2smGQshhBDNJuC1h0Lplj/4XzvAMP1WvT8jhGhdMjIywl6npqaSn58PwP79+0lLSwsFFQDp6ekkJCSwf391ZNS1a9d6g4qT+f1+brzxRrp06cLixYtD+/fv38+QIUPCzh0yZEjYNWr2NTo6mtjY2FBf6zNixAj27NnD9u3bGTNmDJmZmd/6M22FZCyEEEI0m0AoY+EN/tcX3C+BhRBNEemIZNet1asPBQIBPvnkk7Bz+vbti8PR/F/tIh2RjTrf6XSGvdY0Dcuy3wOUUmiaVutnau6Pjo5u0LXuu+8+cnNz2b17d617r3mduq59qr7WJzo6mosuuoiLLrqIK6+8kp49e/LHP/6RWbNmNajPrZkEFkIIIZpNaCiUVTUUyv6vJYW3hWgSTdPChiP58ePW3WHnRDoia31RPtekp6eTm5vL4cOHQ1mLffv2UVxcTO/evRvV1sKFC1m9ejU7duygffv2Ycd69+7Ntm3buO2220L7tm/f3uhrNIRSCq8sqQ1IYCGEEKIZVWUmdNMeClUVYKhAi3VJiFbJ4XDUmjB8JrIVzW3kyJFkZGQwadIkFi1aRCAQYOrUqQwbNoyBAwc2uJ2NGzcyc+ZMnn32WZKSkjhy5AgAkZGRxMfH8/DDD5OZmcnll1/ONddcw4YNG1i7di0bN2487b6Xl5fz29/+lhtuuIHU1FQKCgp47rnn+Oqrr/jhD3942u22Ji0+x+K5556jW7duREREMGDAAN5///16z127di2jRo0iOTmZuLg4rrrqKt5+++2wc1asWFHn8mMej+dM34oQQrR5Xk8wU2H5KHFFhoZEKX/toQ9CiNOnaRpOpzNsq2uI0blG0zTWrVtHu3btGDp0aGjp1tWrVzeqnW3btmGaJvfeey+pqamhbdq0aQCMHz+exYsX89RTT9GnTx9eeOEFXnzxRYYPH37afTcMg88++4wbb7yRiy++mOuvv55jx47x/vvv06dPn9NutzXRVGPXEGtGq1evZvLkyTz
33HMMGTKEF154gWXLlrFv3z66dOlS6/zp06fTqVMnRowYQUJCAi+++CILFixg165d9O/fH7ADi2nTpvH555+H/WzVygENUVJSQnx8PMXFxcTFxTXtJoUQog3ZvekAH7yeS/Kxj4nPXU9SQLHzimyUw+Qnz4xq6e4JcV7weDwcOnQo9OBViDPtVP/mGvO9uEVzZgsXLuTOO+/kxz/+MQCLFi3i7bffZunSpTz++OO1zl+0aFHY63nz5vHmm2+yYcOGUGABdjTcmEBCCCFE8/B47AyFbvopdMfS0RusRmvq9U7aFEII0Tq02FAon8/Hhx9+yLXXXhu2/9prr2X79u0NasOyLEpLS0lMTAzbX1ZWRteuXencuTPXX389H3/8cbP1WwghRP08Jw2FKnLHhiZva0rDDMjKUEII0Zq1WGBx/PhxTNOkY8eOYfs7duwYmoDzbX73u99RXl5OZmZmaN8ll1zCihUrWL9+Pa+88goREREMGTKEL774ot52vF4vJSUlYZsQQojG83irC+OdiIgN1bEAWXJWCCFauxZfPqAhawzX5ZVXXiE7O5s333yTDh06hPZfeeWVXHnllaHXQ4YM4fLLL2fJkiU8/fTTdbb1+OOPM2fOnNO8AyGEEFV83urCeKXuKBQKzfKjdCd+r0lE9Lm9FKYQ5wvLsigvLw/bFx0dja63+Lo8og1rsX99SUlJGIZRKzuRn59fK4tR0+rVq7nzzjt57bXXGDly5CnP1XWdQYMGnTJjMWvWLIqLi0Pb4cOHG34jQgghQnzeqgJ5fiodbiodzpOK5Jkt2TUhWhXTNPn888/DNtOU/4+JltVigYXL5WLAgAHk5OSE7c/JyWHw4MH1/twrr7xCVlYWL7/8Mtddd923XkcpxZ49e0hNTa33HLfbTVxcXNgmhBCi8fxeu2CFYXrxOlx4HM5QLQsZCiWEEK1biw6FmjFjBpMnT2bgwIFcddVV/OEPfyA3N5d7770XsDMJX3/9NS+99BJgBxW33XYbixcv5sorr6xVDAVgzpw5ofLqJSUlPP300+zZs4dnn322ZW5SCCHakIDfBBzolo9Kw0WlwxXKWPi98jRVCCFasxYNLCZOnEhBQQFz584lLy+PSy+9lL/97W907doVgLy8PHJzc0Pnv/DCCwQCAe6//37uv//+0P7bb7+dFStWAFBUVMTdd9/NkSNHiI+Pp3///rz33nt85zvfOav3JoQQbVFVVsIw/XgcLjwOF3pVYCFDoYQQolVr8cnbU6dOZerUqXUeqwoWqmzZsuVb2/v973/P73//+2bomRBCiMYyfXbNVcPy4XQE8Bjuk4ZCSWAhhBCtmSwdIIQQotlY9qJQ6KaXpRGLSHMewzDtonmeSt8pflII0RoMHz6c6dOnn3dti+YhgYUQQohmo4KBhWH5cTgtYh2e0FCoikpPC/ZMCHG+W7t2LY899lhLdyPknnvuQdM0Fi1a1NJdOWe0+FAoIYQQrYcKaGiAYfrQHQqXwwwNhar0elu2c0KI85Lf78fpdJKYmNjSXQlZt24du3btolOnTi3dlXOKZCyEEEI0n4Bd4FQ3vcHAwh9aFcpTKYGFEG2BZVnMnDmTxMREUlJSyM7ODjuem5vLuHHjiImJIS4ujszMTI4ePRo6np2dTb9+/Vi+fDndu3fH7XajlAobCrVlyxY0Tau1ZWVlhdpZunQpPXr0wOVy0atXL1atWhXWD03TWLZsGRMmTCAqKoqePXuyfv36b72/r7/+mp/85Cf86U9/wumUop8nk8BCCCFEs9EC9seKYdkZC/dJgYU3WJVbCNF4SimsioqwDY8nbKt5vLk2pVSj+rpy5Uqio6PZtWsX8+fPZ+7cuaG6ZUopxo8fT2FhIVu3biUnJ4eDBw8yceLEsDYOHDjAa6+9xpo1a9izZ0+tawwePJi8vLzQ9u677xIREcHQoUMBeOONN5g2bRoPPfQQn376Kffccw933HEHmzdvDmtnzpw5ZGZmsnfvXsa
OHcukSZMoLCys994sy2Ly5Mk8/PDD9OnTp1G/l7ZAhkIJIYRoFpZpoVkGALrpRzMUTqeF7rEzFT4JLIQ4baqyks8vHxC2L7rGOf85Q9fu9dGHaFFRDT4/IyOD2bNnA9CzZ0+eeeYZNm3axKhRo9i4cSN79+7l0KFDpKWlAbBq1Sr69OnD7t27GTRoEAA+n49Vq1aRnJxc5zVcLhcpKSkAFBQUcNdddzFlyhSmTJkCwIIFC8jKygqtPDpjxgx27tzJggULGDFiRKidrKwsbrnlFgDmzZvHkiVL+OCDDxgzZkyd133yySdxOBw88MADDf59tCWSsRBCCNEsTq6srTQfmga6Q2GYdkDhC1blFkK0bhkZGWGvU1NTyc/PB2D//v2kpaWFggqA9PR0EhIS2L9/f2hf165d6w0qTub3+7nxxhvp0qULixcvDu3fv38/Q4YMCTt3yJAhYdeo2dfo6GhiY2NDfa3pww8/ZPHixaxYsQJN0761b22RZCyEEEI0i1ABPGVhGXYQoTus0HKzfgkshDhtWmQkvT76MPTa7/fzySefhJ3Tt2/fMzLmX4uMbNT5NfugaRqWZT94UErV+aW85v7o6Jr5mLrdd9995Obmsnv3bhyO8K+1Na9T17VP1dea3n//ffLz8+nSpUton2maPPTQQyxatIgvv/yyQX1uzSSwEEII0SxCVbctPziDk7gdKrQqlN9X94e1EOLbaZoWNhxJ9/shIiLsHD0qCv0cn0ycnp5Obm4uhw8fDmUt9u3bR3FxMb17925UWwsXLmT16tXs2LGD9u3bhx3r3bs327Zt47bbbgvt2759e6OvcbLJkyczcuTIsH2jR49m8uTJ3HHHHafdbmsigYUQQohmUVVZWzd94LAne+oOFapjYUpgIUSbN3LkSDIyMpg0aRKLFi0iEAgwdepUhg0bxsCBAxvczsaNG5k5cybPPvssSUlJHDlyBIDIyEji4+N5+OGHyczM5PLLL+eaa65hw4YNrF27lo0bN55239u3b18rgHE6naSkpNCrV6/Tbrc1kTkWQgghmkXVUCjD9KKdFFgYocCicSvLCCHq53A46NOnT9hWcyjQuUjTNNatW0e7du0YOnQoI0eOpHv37qxevbpR7Wzbtg3TNLn33ntJTU0NbdOmTQNg/PjxLF68mKeeeoo+ffrwwgsv8OKLLzJ8+PAzcFeiiqYau4ZYG1BSUkJ8fDzFxcXExcW1dHeEEOK88NXnJ3jz9x8TXZ7HgMNz6Tv8CJWFTj7a/V32XPYAJHq5f973WrqbQpzzPB4Phw4dolu3bkTUGO4kxJlwqn9zjfleLBkLIYQQzaJqcrZu+nA6gsOiHFYoY6Fk7rYQQrRqElgIIYRoFh6PHUAYlheXURVYVM+xUH5ZnlEIIVozCSyEEEI0i0qPBwDD9BPhrA4sqlaFwi8fOUII0ZrJu7wQQohmUVlp16vQLR+GUXvytmbqyLQ+IYRovc795QOEEEKcFzxeO7AwTC96cFUoTQeFvV9TOlZAYThlSJQQTaWUwhPMElaJiIiQitCiRUlgIYQQoll4PH7AHgqlu6trVijdH/qz32diOCVZLkRTBQIB/vWvf4Xtu+yyy85I5W0hGkre3YUQQjQLr9ce8qRbvlDGAgDDQrPsJaGqiugJIYRofSSwEEII0Syqlps1TF+oQB4ATkLzLPxeCSyEEKK1ksBCCCFEs/AFsxG65QVD8Y/CzuR7ou0lZ4MrQwV81qmaEEIIcR6TwEIIIdq47V9v5z9F/2lyO1XDnAzTTx5xbD3ajS1Hu2M4LAzTnsDtl6FQQrRqw4cPZ/r06edd26J5SGAhhBBt2Ddl33DPxnuYvmV6k9sKeO1shGH5qNTstUHKAi4cDjM0FCogQ6GEEKdp7dq1PPbYY3Ues5TCPAvLWWdlZaFpWth25ZVXnvHrni9kVSghhGjDjpQfCftvU5h+O7DQTS9+zX5u5TUdRDgCoSJ5Xq+/3p8XQoi6+P1+nE4niYm
J9Z7z77ISAuj0jonG0M7sc/MxY8bw4osvhl67XK4zer3ziWQshBCiDSvzlwFQGajEbzXtS7/pt58WGqYfv2YA4LUcOA0LPZixqKisbNI1hBDnPsuymDlzJomJiaSkpJCdnR12PDc3l3HjxhETE0NcXByZmZkcPXo0dDw7O5t+/fqxfPlyunfvjtvtRikVNhRqy5YtYVmD3nEJ9I2LI+v220PtLF26lB49euByuejVqxerVq0K64emaSxbtowJEyYQFRVFz549Wb9+/bfen9vtJiUlJbSdKuBpaySwEEKINqykspTRn/2Yy78aRbmvvEltBZMSGJYPH3ZgYSodHIQyFhXB6txCiMZRSuH3mmGb6VdhW83jzbWpRg4xWrlyJdHR0ezatYv58+czd+5ccnJyQvcxfvx4CgsL2bp1Kzk5ORw8eJCJEyeGtXHgwAFee+011qxZw549e2pdY/DgweTl5ZGXl8eXX+WS89r/EBHh5qohVwHwxhtvMG3aNB566CE+/fRT7rnnHu644w42b94c1s6cOXPIzMxk7969jB07lkmTJlFYWHjK+9uyZQsdOnTg4osv5q677iI/P79Rv5/WTIZCCSFEG3bsPyV0O9GXCwu7UeovJSEi4fQbCyY8DNOLPxhYAJgOPTTHwuORwEKI0xHwWfxh2tZTnvMPtp+Ra9+9eBhOt/HtJwZlZGQwe/ZsAHr27MkzzzzDpk2bGDVqFBs3bmTv3r0cOnSItLQ0AFatWkWfPn3YvXs3gwYNAsDn87Fq1SqSk5PrvIbL5SIlJQWAw3mHuXfmHKZMvIFJP7IDlAULFpCVlcXUqVMBmDFjBjt37mTBggWMGDEi1E5WVha33HILAPPmzWPJkiV88MEHjBkzps7rfu973+OHP/whXbt25dChQzzyyCP8v//3//jwww9xu90N/h21VpKxEEKINqzs828A0Iik1FvatMYCGgC65cenqr+EBBx6aFUoj8fXtGsIIc55GRkZYa9TU1NDT/X3799PWlpaKKgASE9PJyEhgf3794f2de3atd6g4mR+v59bJ95Cl04pLJ77MCpYjHP//v0MGTIk7NwhQ4aEXaNmX6Ojo4mNjT1lBmLixIlcd911XHrppXz/+9/n73//O//+97/561//+q19bQskYyGEEG2Yv9SHG0AzKCotgaQmNBawn1UpfPis6sDCb+ihOhYyeVuI0+Nw6dy9eFjotd/v55NPPgk7p2/fvjidzjNy7cao2QdN07Ase3EHpRSaptX6mZr7o6OjG3St++67j8OHv+LDDS/icDhQVvXKczWvU9e1T9XXhkhNTaVr16588cUXDf6Z1kwCCyGEaMNUefUX/eNHj0O3029LM+2PFE334bWqP6z9uhEaCuWTwEKI06JpWvhwJN3CcNb4kuw2cDobPmSpJaSnp5Obm8vhw4dDWYt9+/ZRXFxM7969G9XWwoULWb16NW/nbKB9Yry9MxhY9O7dm23btnHbbbeFzt++fXujr/FtCgoKOHz4MKmpqc3a7vlKAgshhGjDtMrqp3vFR049YfFUTNNCU/ZTTV3z4DUjQ8f8un5SYBE47WsIIc5/I0eOJCMjg0mTJrFo0SICgQBTp05l2LBhDBw4sMHtbNy4kZkzZ/Lss8+S2C6eI/nHAfDGG7Rrl8bDDz9MZmYml19+Oddccw0bNmxg7dq1bNy48bT7XlZWRnZ2NjfeeCOpqal8+eWX/PKXvyQpKYkJEyacdrutiQQWQgjRhhknzaUuLyg57XYCvuqhA7ruw69rdBn+DUWHYvHlG6FVoaTythBtm6ZprFu3jp/+9KcMHToUXdcZM2YMS5YsaVQ727ZtwzRN7r333rD9t0z8AS+/uobx48ezePFinnrqKR544AG6devGiy++yPDhw0+774Zh8Mknn/DSSy9RVFREamoqI0aMYPXq1cTGxp52u62Jphq7hlgbUFJSQnx8PMXFxcTFxbV0d4QQ4oxZctdcdOO7AJg9/8EDD80
8rXbKi7ys+MX/gbL47r/u46MhKXQZ/Q3lRyNo944D75cD+bzXJFzdvdw183vNeQtCtDoej4dDhw7RrVs3IiIi6jzHsizKy8OXiI6OjkbX2966PIXHD5HoKwKg2IghvmPPlu3QeehU/+Ya871YMhZCCNGG6ZaLqpVhzRLPabdTlYkwTC+6wwK3/czKEWHi09wYpj23wvTJsywhmoOu6/KUPEg7abK1oSQr2pJaPKx97rnnQtHRgAEDeP/99+s9d+3atYwaNYrk5GTi4uK46qqrePvtt2udt2bNGtLT03G73aSnp/PGG2+cyVsQQojzlm6dtO56xelPrK4aCmVYfpRTw3AFAw23hQ8Dw7LHXFVV5xZCiOaiK6vOP4uzr0UDi9WrVzN9+nR+9atf8fHHH3P11Vfzve99j9zc3DrPf++99xg1ahR/+9vf+PDDDxkxYgTf//73+fjjj0Pn7Nixg4kTJzJ58mT++c9/MnnyZDIzM9m1a9fZui0hhDhvaKo65a2dfsKCQDBjoZs+lEPHcAcDDZeJV1VP3rZkUSghRDPTT8pSOFTjK4WL5tOigcXChQu58847+fGPf0zv3r1ZtGgRaWlpLF26tM7zFy1axMyZMxk0aBA9e/Zk3rx59OzZkw0bNoSdM2rUKGbNmsUll1zCrFmzuOaaa1i0aNFZuishhDg/WMpCozqw0P2nv0ylP7jak2F6sRzVGQtNB7/DQA8GFkh9PCFEMzNOylIYmEjOouW0WGDh8/n48MMPufbaa8P2X3vttWzf3rCS9JZlUVpaSmJiYmjfjh07arU5evToU7bp9XopKSkJ24QQorWr8FeAXr0srG6e/rS7Sq8dMRiWH2VoGK7qj3a/s7pAHv7ahbGEEOJ0KaVqBBaKgCXzLFpKiwUWx48fxzRNOnbsGLa/Y8eOHDlypEFt/O53v6O8vJzMzMzQviNHjjS6zccff5z4+PjQdnKZeSGEaK3K/GUo7eTAwnXabVVUVAbb8KEMPZSxAPA7NDQtuK5toMWn9gnRKiil8Pv9YVtbHAJkYWcpTmaaMuaypbT4qlANKbdel1deeYXs7GzefPNNOnTo0KQ2Z82axYwZM0KvS0pKJLgQQrR6Jd4SLKM6sDh5vkVjVXrswMGwfASM6jkWAKYTCAYWmtXiHztCtAqBQIB//vOfYfsuu+wynE5nPT/ROpmWiTM4+MlSoGtgmT4g8tQ/KM6IFnuHT0pKwjCMWpmE/Pz8WhmHmlavXs2dd97J66+/zsiRI8OOpaSkNLpNt9uN2+2u97gQQrRGRWUlKL36Y0BvQmDhqQosTB8Bl47hqq6wbTlBC1bi05SOGbAwHJK5EEI0XcD0U5Vr9ePEjR9LMhYtpsXe2V0uFwMGDCAnJydsf05ODoMHD67351555RWysrJ4+eWXue6662odv+qqq2q1+c4775yyTSGEaIuOfXMsfIcWQcAK1H3yt/AE51jolg+/rqOfNMfCcFsovXrWtt8r45+FEM3DCi4MYSodf7Aoj2We3vuYaLoWfWQ0Y8YMli1bxvLly9m/fz8PPvggubm5ofLss2bN4rbbbgud/8orr3Dbbbfxu9/9jiuvvJIjR45w5MgRiouLQ+dMmzaNd955hyeffJLPPvuMJ598ko0bNzJ9+vSzfXtCCHFOK8ovCN+hRVLuL6/75G/h9QQnb5s+ArqB4T5p+Ue3BS6FFpxQWVXzQgjR+gwfPvyMfeeqq20ruIZ1AB0z+LVWAouW06KBxcSJE1m0aBFz586lX79+vPfee/ztb3+ja9euAOTl5YXVtHjhhRcIBALcf//9pKamhrZp06aFzhk8eDCvvvoqL774IhkZGaxYsYLVq1dzxRVXnPX7E0KIc1n58fAV8JQeSZm/7LTa8oaWm/Xh1zQM58kZCxPTqYWK5FXVvBBCiMZYu3Ytjz32WNi+qiDCRCcQzFhgntn3mP3793PDDTcQHx9PbGwsV155Zb012NqaFp9FN3XqVKZOnVrnsRU
rVoS93rJlS4PavOmmm7jpppua2DMhhGjdfCfK0ACnrxi/Kx7TiKDUUwoxjW/L7wsA9rKyPqeO66THVobbxHTodi0LR5QMhRJCNIrf78fpdIaVF6iigplQEwNLs994Ti6Y19wOHjzId7/7Xe68807mzJlDfHw8+/fvJyLi9OeotSYye04IIdqoQKm9RKzLf8LeoekUFBWdVltVwULV5O2TGW4Ty1FdfVsyFkK0bpZlMXPmTBITE0lJSSE7OzvseG5uLuPGjSMmJoa4uDgyMzM5evRo6Hh2djb9+vVj+fLldO/eHbfbjVIqbCjUli1b0DSNxHad0S64nLjOvelwQQ+yps8ODbtcunQpPXr0wOVy0atXL1atWhXWD03TWLZsGRMmTCAqKoqePXuyfv36U97br371K8aOHcv8+fPp378/3bt357rrrqu1QmlbJYGFEEK0VeX22GRDlaAFJ20fzzt2qp+oV1WwYFg+TFf48t6Gy8LSNYxgkTyvR1ZsEaKxlFL4PZ6wzfT5wraax5tra2x9jJUrVxIdHc2uXbuYP38+c+fODS2so5Ri/PjxFBYWsnXrVnJycjh48CATJ04Ma+PAgQO89tprrFmzhj179tS6xuDBg8nLy2PfJ7vI+/gd3nhtFRERboZe2R9DmbzxxhtMmzaNhx56iE8//ZR77rmHO+64g82bN4e1M2fOHDIzM9m7dy9jx45l0qRJFBYW1nlflmXx17/+lYsvvpjRo0fToUMHrrjiCtatW9eo309r1uJDoYQQQrSMqpp1BhUYZiUBPZaSI3V/oH4b02d/8dBNH4Eay+g73Cam4bKHQgEVHs9p91mItirg9fL07ace5r3tDF37gZV/xtmIoT4ZGRnMnj0bgJ49e/LMM8+wadMmRo0axcaNG9m7dy+HDh0K1QxbtWoVffr0Yffu3QwaNAgAn8/HqlWrSE5OrvMaLpeLlJQUIrRyzGPlPDjz19x8y81MuXk8lcpiwYIFZGVlhYbbz5gxg507d7JgwQJGjBgRaicrK4tbbrkFgHnz5rFkyRI++OADxowZU+ua+fn5lJWV8cQTT/Cb3/yGJ598krfeeosf/OAHbN68mWHDhjX4d9RaScZCCCHaKN1nT3TUNQ+GaQ+LKi8sOdWP1MsMrvRkWD4sZ1XGwv6v4TYxdQPDtCMZCSyEaN0yMjLCXqemppKfnw/YE5/T0tLCChGnp6eTkJDA/v37Q/u6du1ab1BxMtPn4ca7H+aCCzox7/HfAHYl7v379zNkyJCwc4cMGRJ2jZp9jY6OJjY2NtTXmizLfp8bN24cDz74IP369eMXv/gF119/Pc8///y39rUtaHTG4ujRo/zsZz9j06ZN5Ofn10qPmWd4Jr4QQojmoQcc4ABd92FYdmDhLzq95WaVT6FhZyysYMbC6UjCHziG4Tbx69VzLCorvM3RfSHaFIfbzQMr/xx67ff7+eSTT8LO6du37xmpvO1oZBHhmn3QNC30pVwphaZptX6m5v7o6OgGXWvGz2eT+/UR/vrW33FHuKHcDiyqrnuqa3xbX2tKSkrC4XCQnp4etr93795s23am8kXnl0YHFllZWeTm5vLII4+Qmppa5z8OIYQQ5z7NcqEAwwig++3Awio9vWyC8tn5CU15UMHP6Qj3BcHAwqJS1zF8dmDh8UpgIURjaZoWPhzJMDBcrrBznBERZySwaE7p6enk5uZy+PDhUNZi3759FBcX07t370a1tXDhQtau/zs71q8gMSkJh2Hfu4HikksuYdu2bWH10LZv397oa5zM5XIxaNAgPv/887D9//73v0OlEtq6RgcW27Zt4/3336dfv35noDtCCCHOFt1yYwIOl4XuswMLVXmaWeeA/ZDJ1P0YwarbkVFplJbvQdPA79IxyoOBhcdXbzNCiNZt5MiRZGRkMGnSJBYtWkQgEGDq1KkMGzaMgQMHNridjRs3MnPmTBb/9pckJSZw7FgBUVGRmJWltIuPZdqD05k86Udcfvn
lXHPNNWzYsIG1a9eycePGJvX/4YcfZuLEiQwdOpQRI0bw1ltvsWHDhgaXRGjtGj3HIi0trdGrAwghhDj3aNhPP51u0LEzFdrpJhMCwYq3hi9Uddsd0QEN+4lqwH3yqlASWAjRVmmaxrp162jXrh1Dhw5l5MiRdO/endWrVzeqnW3btmGaJj/5xWOk9r+Wvn360qPbRUyb/TsArr9+LIsXL+app56iT58+vPDCC7z44osMHz68Sf2fMGECzz//PPPnz6dv374sW7aMNWvW8N3vfrdJ7bYWmmpklPDOO+/wu9/9jhdeeIELL7zwDHWrZZWUlBAfH09xcTFxcXEt3R0hhGh2Sin+546X8Eek0bvdWxz9OoLCqOEo9R4/eSG70e09M/UtNMtFr89nU3BTBUm9i+jebTr//e9LmFYheW+kkHDgu/y362jaDVLceuc1zX9TQrQSHo+HQ4cO0a1bt3oLr/n9fv75z3+G7bvsssvO+aFQzclnmriO7gWgKK4nsVFu/HmfE6H5KY69kPjYdi3cw/PHqf7NNeZ7caOHQk2cOJGKigp69OhBVFRUrX/A9a39K4QQ4txREahA6ZEARMdFY+TZQ6G0mmvFNoBSCiz740Q5AhguO2PhcMThMGIxrUJMlxZaFUoqbwvRdIZh0L1791r72hLTtGviKAWG4UTXDAIYgB/LlHo5LaHRgcWiRYvOQDeEEEKcTaW+UpRuP5WKSYjFcNjLzOpW41Z/AbACCi04slYZgdAcC4cjFqczAa//v1gu0INDoSSwEKLpdF0nMTGxpbvRoszgSnMmOg7DQNN0TC34XiSBRYtodGBx++23n4l+CCGEOItKvCWYhp2xiOnQDofzMCjQVeMDC7+vOlCwdCssY+FytYMKwKXQghmLqpoXQgjRFFVZiQAGhh6c51U1fdgKtFS32rTTqrxtmibr1q1j//79aJpGeno6N9xwQ5tLwQkhxPnq+IkToNnv2bGdknFHaFAJ0PDqulUCwUBBs0yUQ2G4qzMWLrc9xtnhtkBJYCGEaD6WaQcPJjou3V6ZztJ0UPb7kTj7Gh1YHDhwgLFjx/L111/Tq1cvlFL8+9//Ji0tjb/+9a/06NHjTPRTCCFEMzr21VEgBk2ZxHVKJSLKaQcWWiSmZWLoDX9QFAhmLHTLh6XrYRkLpysBsKtvK+xhC5ZXAgshRNNVBxYGelVgoetggq4ksGgJjV5u9oEHHqBHjx4cPnyYjz76iI8//pjc3Fy6devGAw88cCb6KIQQopmV5BcBYAQqccQnE5MQrHKrRVIeaFz17aqhUIbpwzT0k+ZYxOFwxNvH3CZWcC1bJavNCiGaQ3C4U9W8CgCCfzYksGgRjc5YbN26lZ07d4ZNGGrfvj1PPPEEQ4YMadbOCSGEODMqCgqBzhimByLiiUmMg4NgGRGU+cqIczV8qe2qydiG5cN0aOgOexVzhyMWp8Nux3CbBLTgmGe/1qz3IoRomzTLfohhnfycPJi5kMCiZTQ6sHC73ZSWltbaX1ZWhqtGaXkhhBDnpsAJOyuhq0pwxRKTbD8ssoxISjwldIrp1OC2Kjz2UrW66UNVTdFQGg5HDA6nnbFwuC0sR3CVlkCjk+VCiBqkjgVoweDB0nTMUh/KUuhGMGOBDLlsCY1+d7/++uu5++672bVrF0oplFLs3LmTe++9lxtuuOFM9FEIIUQzM8vsYUm6VQG6TtwFHUPHjuYfb1RblZV21W7D8mG57aeFGm40Tcd58lAo3c5YaKYs9CFEazV8+HCmT59+VtqumkehNAOz2ItV6gutDuXAxGpcDWjRDBodWDz99NP06NGDq666ioiICCIiIhgyZAgXXXQRixcvPhN9FEII0dwqgxOusYOCmNQU9OCa8Ce+aVxgUeGxgxTDrA4sdM2es+FwBodCuUxMPTg0wTqtBQmFEG3c2rVreeyxx0KvDWVnJdRJcywcwYUndBTmGVgZStO0Orennnqq2a91Pmr0u3tCQgJvvvk
mX3zxBZ999hlKKdLT07nooovORP+EEEKcAcF51KHAQo9KxDD/g2W4KDlW2Ki2PJ5gG6YPMzgi1jBiAMIyFqZhPz3UMDBNC8OQIVFCiG/n9/txOp21CgJWBRbaSavYOXQDS9lTLfwBP06jeR9k5OXlhb3++9//zp133smNN97YrNc5X532u3rPnj35/ve/zw033CBBhRBCnGd0f/CpXlWEERGPYdkBQkVBcaPaqvTYmQ7D8mHVCCwczgT7tdvC0qufHgak+rYQrZZlWcycOZPExERSUlLIzs4OO56bm8u4ceOIiYkhLi6OzMxMjh49GjqenZ1Nv379WL58Od27d8ftdqOUChsKtXnzZiIvuBTtgsvp2KET7rQ43Glx3H3PTzGx39+eX7qUHj164HK56NWrF6tWrQrrh6ZpLFu2jAkTJhAVFUXPnj1Zv379Ke8tJSUlbHvzzTcZMWIE3bt3b/ovrhVoUBg3Y8YMHnvsMaKjo5kxY8Ypz124cGGzdEwIIcSZowVcKBfoRnDtV1eMPd8CCBQ3brlZb1VgYfpQbjsr4QxO2q5aFUrTwIrU0JSJ0gz8Xgt3VHPciRDiXLNy5UpmzJjBrl272LFjB1lZWQwZMoRRo0ahlGL8+PFER0ezdetWAoEAU6dOZeLEiWzZsiXUxoEDB3jttddYs2ZNnQWYr7xqMF9/nIOO4oSrE4c//ZJxt93EdwcPIaAZ/OXvOfxi5iwWLVrEyJEj+ctf/sIdd9xB586dGTFiRKidOXPmMH/+fJ566imWLFnCpEmT+O9//1srO1KXo0eP8te//pWVK1c2y++tNWhQYPHxxx/j9/tDfxZCCHF+05ULEzAcwSVgdR1d2RkLq6xxhSZ8XvvzQbO8qGDGwhlcrlbXXWi4UPhQUcHhUo7IUFE9IUTDKKVQ/uqVjpTfRAuET05WPhNLNf8QQ82po2kNXyY6IyOD2bNnA/YIl2eeeYZNmzYxatQoNm7cyN69ezl06BBpaWkArFq1ij59+rB7924GDRoEgM/nY9WqVSQnJ9d5Dd1h0KlDewAqyiO5b+ZPuX3ij8j60W34yr5mwfOruPmWiUydOhWwH5Lv3LmTBQsWhAUWWVlZ3HLLLQDMmzePJUuW8MEHHzBmzJhvvc+VK1cSGxvLD37wgwb/blq7BgUWmzdvrvPPQgghzk9acF1Yh6v6i4mOvWwslY1bptHnCwAGuulFDxbHc7nbVberxWCqQiy3wvB4MR2RoaJ6QoiGUX6Lbx7dHravY41z8tftPiPX7jR3MJqr4au5ZWRkhL1OTU0lPz8fgP3795OWlhYKKgDS09NJSEhg//79ocCia9eu9QYVAFawOJ7XF+D2rNvpckEaC+fMR1M6Fjr7Dxzi1tuzwn5myJAhtRYaOrmv0dHRxMbGhvr6bZYvX86kSZOIiIj49pPbiEaHtVOmTKmzjkV5eTlTpkxplk4JIYQ4szTsD0JXhH7SPjtjofkaV8CuqkCeUp5Q1W23u3oYgcOItY9HgGH5gz8TOM2eCyHOdTVraWiahhUsZqeUqjP7UXN/dHT0Ka9hBlexu2/W43z11Ve8/PxKHA4HmBpmsB2txnKzdV37VH09lffff5/PP/+cH//4x996blvS6KnyK1eu5IknniA2NjZsf2VlJS+99BLLly9vts4JIYRofkopCAYWkTHu0H5DD9a28DeuzkTVRGzN8mC47D87nNWfEQ5nPN6AfUnDtK9RXlkJtKvZlBCiHppTp9PcwaHXAb+fTz75JOycvn374jgDBfI0Z/MNr0pPTyc3N5fDhw+Hshb79u2juLiY3r17N7gdM+Bn4Qv/y+sb3mbrXzbTvp09LApLx9J0el/UjV27djH1p9U/s3379kZd41T++Mc/MmDAAC677LJmaa+1aHBgUVJSEiqIV1paGpb2MU2Tv/3tb3To0OGMdFIIIUTzqQxUonR75nRMfPVTQV0PzpWoWjO2gUyPnX1Qyofhtp/0OYKTtgGcrgSoBOU
GPXByYCGEaChN08KGI2mahXKEP33XXAa689wuQDly5EgyMjKYNGkSixYtCk3eHjZsGAMHDmxwO+9u2sTM3y5m4W9/TVJCe47k26tKRcVFQ6zGw/fdRua9v+Cq7z7PNddcw4YNG1i7di0bN25s8j2UlJTw+uuv87vf/a7JbbU2DQ5BExISSExMRNM0Lr74Ytq1axfakpKSmDJlCvfff/+Z7KsQQohmUOorRen2w6G49tVZA8NpBwi6ctf5c/WxvMEhDnirMxaO6oxF1bAow2WiKzugqAzWvhBCtC2aprFu3TratWvH0KFDGTlyJN27d2f16tWNamfn9p2Ypsm0X8yha/+L6DqgJ10H9GTGrx4GXWP8mBHMnzOLp556ij59+vDCCy/w4osvMnz48Cbfw6uvvopSKjTpW1RrcMZi8+bNKKX4f//v/7FmzZqwZbhcLhddu3alU6dOZ6STQgghms+JimJMww4sYlOSQvudLgUB0FXjJiJaXnscs6X5cbpqZyyqJnIbbgtl2kvaVlZ6T/8GhBDnrJOXjK2ybt26sNddunThzTffrLeN7OzsWrUvarb984en8eT0yZwghmgrpfokQ6NUPwbAPbdnMn3WnHqvo2rMwQAoKiqq9/wqd999N3ffffe3ntcWNTiwGDZsGEBoeTBdl4qpQghxPsrPPwaa/R6e0Ln6A9kVqUMpVM2/aCjlBw0wDX8oY+E8KbBwnFR9OxCsleHxNG5JWyGEOJlm2e81plbj+6il0A17iJihZPW5s63Rk7e7du1KUVERH3zwAfn5+bVmzt92223N1jkhhBDN7/jX+UA8muUnsuOFof2R0RF2YKFFYCkLveYHdn0C9oe4pZmhVaGUHsPiL48yLDGW9s6TAwt7KJQ3WPtCCCFOhx4MGlRwVL/m0FEBCxQYuv2e5MDCtCwMeRh+1jQ6sNiwYQOTJk2ivLyc2NjYsGW7NE2TwEIIIc5xZfnHgXgM04MeXT3HIiYhBo6A0iOp8FcQ44ppWIMB+0PbMvyhydvby5w8fiiPzYUlPN/JDiwcbhNvsAifzyOBhRBNYRgGXbp0qbWvrdBV8CFG8AFIwFBYpolTGTgMe5+uKSoDJpEuCSzOlkYHFg899BBTpkxh3rx5REVFnYk+CSGEOIMqjxcBYJiV4K4eshSXZAcZlh5Jmb+s4YGFZX+ZUe7qYCE/YC95+Y3XH5pvYbgtUPbcCp/UsRCiSXRdb9OrcYaGOQUDiyJ/KUpTJKoYNM3AUhq6pvAH/ES6mn8JXlG3RodwX3/9NQ888ECzBRXPPfcc3bp1IyIiggEDBvD+++/Xe25eXh633norvXr1Qtd1pk+fXuucFStW2Euy1dg8sgKJEEIAECix5znoqhL06iecsan2RG7LcFNYVtTwBq3gM6rIYLCgHJwI/vG4P4DzpKFQCntuhRTIE0I0hYGdsdA0HQuFIriIBBaa0kNzLwIByY6eTY0OLEaPHs0//vGPZrn46tWrmT59Or/61a/4+OOPufrqq/ne975Hbm5uned7vV6Sk5P51a9+dcqCJHFxceTl5YVtUm5dCCFsqtz+cq+r8Acu8SdN5D56JL9hbSkFKvg0MCJYKI9ICvwBnJafCtPCp1VlLEwsZX/IBzwyqVIIcXqUUhhUvd/oWFTP97VQaOgEsB+aWKYEFmdTo4dCXXfddTz88MPs27ePvn371iqFfsMNNzS4rYULF3LnnXeGyqEvWrSIt99+m6VLl/L444/XOv/CCy9k8eLFAKes8K1pGikpKfUeF0KINs1jP9nTVHiRusikFHTzMywjghPfHIMGFJQ1AxYa9lw7FWF/uBtaNNqJL/ns/27iT6nXUTzgt/Z+lxXKWFiSsRBCnCZTqVDGwlB2xqKKpVmgdMzgs3NlynvN2dTowOKuu+4CYO7cubWOaZqGaTbsKZTP5+PDDz/kF7/4Rdj+a6+9lu3btze2W2HKysro2rUrpmnSr18/HnvsMfr379+kNoUQorXQfHY
goGvhGQstMgHD9GAZEZQeO9GgtgK+6ieFmtv+cDeMGJKPfkS0VcnQEx9SbEXaxzVQEfYci6raF0II0VimZYa+wBrKwNTCMxZYOpamgwLNksDibGr0UCjLsurdGhpUABw/fhzTNOnYsWPY/o4dO3LkyJHGdivkkksuYcWKFaxfv55XXnmFiIgIhgwZwhdffFHvz3i9XkpKSsI2IYRorTS/PURA12vUknDH2RO6AW9BUYPa8nuDwxGsAKoqsHDE4qgoACDZX0hBAFAuAFSkPSxB+SSwEEKcHjM4vMlUOoalhWUsTBSa0rGCq5ZW1bsQZ0eT1t9qjgnRJy9XC/a4uZr7GuPKK6/kRz/6EZdddhlXX301r732GhdffDFLliyp92cef/xx4uPjQ1taWtppX18IIc51uml/ydeNGmOPDYc9oRvwByd4f5uAz/7QNkwfmsv+cHc643F77MCinb+EQq8HQ4+2fyDSDmaU//Tf54UQ4Pf72bNnT9jm97f8fILhw4fXubhOc7ZtBuwshImOoYwacyysYMYiGFgoyVicTY0OLEzT5LHHHuOCCy4gJiaG//znPwA88sgj/PGPf2xwO0lJSRiGUSs7kZ+fXyuL0RS6rjNo0KBTZixmzZpFcXFxaDt8+HCzXV8IIc41mrIXszCctZ/kVQUWVRO8v43PZ39o65YPghkLpyOeKE+hvR9FeckxDD3W/oHgBO+q2hdCiNMXCATCttZu7dq1PPbYY5im/f4UQA9mJ06aY4FCszRU8NlFc1ffLisr4yc/+QmdO3cmMjKS3r17s3Tp0ma9xvms0e/sv/3tb1mxYgXz58/H5XKF9vft25dly5Y1uB2Xy8WAAQPIyckJ25+Tk8PgwYMb2616KaXYs2cPqamp9Z7jdruJi4sL24QQorXSlBsAp7uOYwQz0Z6GDVWqqLADET1QiR7MWJjuJNr5qudoeMvycTjsJWe1qiVpTQkshBANU5WJSUxMJDY2Fis4IdvUdDQ0TMIDCyw99A23uQOLBx98kLfeeov//d//Zf/+/Tz44IP89Kc/5c0332zW65yvGv3O/tJLL/GHP/yBSZMmhVV4zMjI4LPPPmtUWzNmzGDZsmUsX7489JeTm5vLvffeC9iZhJqVvKvSfWVlZRw7dow9e/awb9++0PE5c+bw9ttv85///Ic9e/Zw5513smfPnlCbQgjR1mnYGQtnZO31O6omdGu+hn08lFfa5+tWOYbb/gAvdybT3lcUOscqzcfpSrDbDVbmxmz02iFCiPOEZVnMnDmTxMREUlJSyM7ODjuem5vLuHHjiImJIS4ujszMTI4ePRo6np2dTb9+/Vi+fDndu3fH7XajlKoeZmUG2LL9H8RdkI47LY7UC1K54IILuOCCC5j24DSUBZquWLrydTKGXIvL5aJXr16sWrUqrB+aprFs2TImTJhAVFQUPXv2ZP369ae8tx07dnD77bczfPhwLrzwQu6++24uu+yyZivFcL47rQJ5F110Ua39lmU1emzfxIkTWbRoEXPnzqVfv3689957/O1vf6Nr166AXRCvZk2L/v37079/fz788ENefvll+vfvz9ixY0PHi4qKuPvuu+nduzfXXnstX3/9Ne+99x7f+c53GnurQgjR6iilUJq9SlN0bGSt41UTurVAw774V3qDGQ6zAsNlBw1lejuS/EXV1yw/hsttV/XWIyyUMqtrXwghWp2VK1cSHR3Nrl27mD9/PnPnzg2NUFFKMX78eAoLC9m6dSs5OTkcPHiQiRMnhrVx4MABXnvtNdasWcOePXvCL2CZDB54Gfv2/B+HPvw3H3/8Ma+99hoRERFceeWVWJbF3/6Ww7TZT/Hg3bex5597ueeee7jjjjvYvHlzWFNz5swhMzOTvXv3MnbsWCZNmkRhYWG99/bd736X9evX8/XXX6OUYvPmzfz73/9m9OjRzfK7O981+pFRnz59eP/990Nf/qu8/vrrp7Wk69SpU5k6dWqdx1asWFFrn1K
nTs///ve/5/e//32j+yGEEG2Bx/SAbgcUMe1qD/vUjWDxPNNV61hdKivt5WM1VYHhsjMWpSqO9icFFo6KY7jdiQAYbguUD02LxDQtDEOGRAnREEqpsAe4fr+/1rwKn8/3rd+TTofT6WzUwjoZGRnMnj0bgJ49e/LMM8+wadMmRo0axcaNG9m7dy+HDh0KLZazatUq+vTpw+7duxk0aFDoXlatWkVycnKt9nUVwOVyktyhPVFme8qKvMycOZObb76Zm2++GdMyeeaZ5WRlfp/7b/8h3uSLmDFjBjt37mTBggWMGDEi1FZWVha33HILAPPmzWPJkiV88MEHjBkzps57e/rpp7nrrrvo3LkzDocDXddZtmwZ3/3udxv8+2nNGh1YzJ49m8mTJ/P1119jWRZr167l888/56WXXuIvf/nLmeijEEKIZlLqLcU07MAioUP7WscdTjvrUDUP49t4PMHAwvTYQQNQbjqINatXlYqoLMDlugSwq28r5UEjkoDPwoiUwEKIhvD7/cybN++U57z11ltn5Nq//OUvw+bVfpuMjIyw16mpqeTn5wOwf/9+0tLSwlbgTE9PJyEhgf3794cCi65du9YZVABolv1eozQdr9/H3XffTefOnZk3bx5KKSyl+Pe/D3LfzePQNYXfNInEwZAhQ0KFluvqa3R0NLGxsaG+1uXpp59m586drF+/nq5du/Lee+8xdepUUlNTGTlyZIN/R61VowOL73//+6xevZp58+ahaRqPPvool19+ORs2bGDUqFFnoo9CCCGayfHSE1iGPcci4YKUWscdbsBbvXLUt/F6g09QLU8oY+Gv8IadE+0pwAhO3na4TLAqwWhHwGvirmOehxDi/OZ0hg911DQNqyoYqKesQM390dHR9bZvYL/XKHSm/XIGX3/9NZs2bcLtduPxeOzq24AKthcI+AF3ndc+VV9rqqys5Je//CVvvPEG1113HWAHJnv27GHBggUSWHAagQXA6NGjZSyZEEKch45+Xb3Ed3xap1rH3ZFO8AJaIwMLPKE5FmZledg5ib4TePQEwM5YaGYZOMHvk8JVQjSU0+nkl7/8Zei13+/nk08+CTunb9++tb4oN9e1m0t6ejq5ubkcPnw4lLXYt28fxcXF9O7du0FtVK309Pzzy3hjwzreXL8+LLthoejV6yLe3/1Psn54PWbAfp/avn17g69RF7/fj9/vR9fDM62GYdQbjLQ1jQ4sunfvzu7du2nfPjyFXlRUxOWXXx6qayGEEOLcU/jNESAZ3fThSuxQ63hkbAQUAVpkgwqWej0+wAGWFz2YsbAqS8POSfadoAQ7Y2G4LXSzHEV11W4hxLfTNC1sOJKmaTgc4V/jXC7XGQksmtPIkSPJyMhg0qRJLFq0iEAgwNSpUxk2bBgDBw5sUBsGFhvf28Xc3zzOk799gsTERI4fP45SikAggDs2kenT7mXybfcx6NJe9BtxA6v+7/9Yu3YtGzduPO2+x8XFMWzYMB5++GEiIyPp2rUrW7du5aWXXmLhwoWn3W5r0ujBrV9++SWmWfvDwOv18vXXXzdLp4QQQpwZZUePAWCYleCOQynFC4fz2VRQAkBscEK30iOpCHx79W1/RbCQnsODHlyBXK8sAsDrsttq7z9BMXaBPMNtgmW3W15Z2Sz3JIQ4f2iaxrp162jXrh1Dhw5l5MiRdO/endWrVzfo55VSGFhs2/0xpmnys188TP/+/bnooovo2bMnjz76KBaKG64fy/y5v+Sp519i6NVX88ILL/Diiy8yfPjwJvX/1VdfZdCgQUyaNIn09HSeeOIJfvvb30pZg6AGZyxOXtf37bffJj4+PvTaNE02bdrEhRde2KydE0II0bx8J+wAQrcqwXDweXklf/j0YyKi2nHNsO8Q3yEJANOIpNRXSrSz/nHOAGZlcFUaZzBIUBqOCrs4Xnn7XrjzdpPsO8EHZhTxgMNtolmVKKBCAgshWp0tW7bU2rdu3bqw1126dDllQbn
s7OxatS+q2vZbJsaRvWQ/dC8PPpxNpWUPfaoaSVNQUIClFBo6U7JuZfpt48nXk+iQklarvbpW0CoqKjrl/aWkpPDiiy+e8py2rMGBxfjx40N/vv3228OOOZ1OLrzwQn73u981W8eEEEI0v0BpsFK2sutPHCv8hu0fTGJfTA/8V+8k/oKOACjdyfGiAlKia0/wPpnpCQYWEcEJ2yoCd2UBAGVxF5GYtxu38lPu8xNP1XKzdsZCAgshRGOZZgAnoBQY6FjBidyGYYQCBQsLLAdWcCin3szVt0X9GhxYVE1K6datG7t37yYpKemMdUoIIcSZocrtSYyasr/U+/M/w6389Ck7wHGfj/adUwF7qcX8b/Lhgj6nbM/yBicsRgTrX2iRRHnswOLVTy1+4ookSlXiKy8J/YzuKsOiugaGEEI0VFXtDhMdXVXPATs5sFCa/T8SWJx9jZ5jMWfOHGJjY2vt9/l8vPTSS83SKSGEEGdI8Lu8jp2x8JXYq0S5VICCkmO42nXACNhBR+GR49/anOWzP8g1tx2wmFoc8b4iAI4Tx3FlD5sNlB4HZU881Z12xqLSI4GFEKJxqlZ4MjFQwcBC13U0TbP/GzxPWRpVLwxlnpHCgaK2RgcWd9xxB8XFxbX2l5aWcscddzRLp4QQQpwZms+eYa1p9pd6q7R6+dmSE9+AO86e2A2UByd6n1JwJJQWYX/Yl+spJPntORaFKo7jyn4QZZUfQ9eiANDddvsej6+JdyNE26XrOp06dQrbai6D2hpZZtVDDB0rGDkYhhE6rgdXkbBMQLeDCQMTS+KKs6LRy83Wt/zgV199FTahWwghxLlHCzjABbpuf6k3yqsrzFaWHAHnd+yJ3YD3RGmdbZxM+e0vMprLjjDKjSTa+/8BQIGK45gVDwbo5ccwEmOxrCI7sKgEX6gGhhCisQzDoFOn2rVoWjtlVg+FshSghQcWhq5jWiaWBZqzKrCwCFgWhm7U1aRoRg0OLPr374+maWiaxjXXXBO2drJpmhw6dIgxY8ackU4KIYRoHpoVHI7ksD+cjZKjoWP+kjz7WHD+hVny7cvNYgYDC7c9hrnCkUxScChUAXEUKHvJWWdlAQ5HHH4faG4fVILXI4GFEKJxlFUdWFQlIcIzFvZ7kmUBmn2GAxOfqXCfVllo0RiNXhVqz549jB49mpiYmNAxl8vFhRdeyEUXXdTsHRRCCNF8dOUGCFXJjimrzliYpXaQoQVXjFIVgW9tT5kGGtWBhVfFEmXZP1+g4jgWLIwXVXkcpzOeSh/owfkYgQoZCiWEaBzNChbi1HSUst/HwjIWwT9bFmha8DgmAamMfVY0OLCYPXs2ABdeeCETJ04kIiICgOLiYv70pz/x1FNP8c9//rPO4nlCCCHOFZEAOCPsp3oJ3oLQEa3YDix0zQ4MaMjcauVAKdNeRhZQwQyGBxflRIQyFjGeQgxnN/s6br9dIbdSAgshRONUrfBkaRpWMGdx8tySqsDCVBbO4Mh9XYNAQL6fng2NnuVz++23ExERwbvvvsuPfvQjUlNTWbJkCWPHjuUf//jHmeijEEKIZmM/FHJHO6k0LZICJ0JHXMHshVYVWPgbMB5ZOUD5QhkQgrFCoYoFtNCqUEm+QryuVACMCBMIYHrkg14I0ThVgYVCs+tVUE/GAoV+0gRv05Shl2dDo0abffXVV6xYsYLly5dTXl5OZmYmfr+fNWvWkJ6efqb6KIQQohkopUCzMxaRcTEcq6ygs1VdXyK20l4Fqmpitx449UeEUgoNF5YqQnfZH/a6z05zHA9mKgqqAgt/EYecHdABh9sC5amugSGEaDWGDx9Ov379WLRo0Rlpu89FXXh27oOg6VjBORRhcywcVYGFhaYZBDBwEcAKSGBxNjQ4YzF27FjS09P517/+xZIlS/jmm29YsmTJmeybEEKIZuQ1vVi6nbGIb9+OouIjJ01/hHY+e1iU7rCDhKqJ3vUJ+IOBgfKFhkIZHnvCd2EwsDi
O/d9k3wnKtAT7HLeJUt5QDQwhROMFAgE+/fTTsK2qeFxrtXbtWrJn/iT4Krw4Xs0/Wyg0dEzN/qpbNem7qY4ePUpWVhadOnUiKiqKMWPG8MUXXzRL261BgzMW77zzDg888AD33XcfPXv2PJN9EkIIcQYUe0uxDDtjkdixA98U5YUdT/afoDxgYjgtsEALTvSuT8AXHJKgvBjBjIXTYy9RWxAMKKqGQsWbZZSZkcRhBxYoL8p/6sBFCFE/pRQej6fWvtbI7/fjdDpJTEzE74kAzFBYUVUcj5NeA8G4IxhYKIKFLZpGKcX48eNxOp28+eabxMXFsXDhQkaOHMm+ffuIjo5u8jXOdw3OWLz//vuUlpYycOBArrjiCp555hmOHWtA8SQhhBDnhCPH8rEMO1ho1+UCu24FcNhKtvepUo5VluOMsD+kNRVxyvYCPjtLoVkVoTkWbq89tKpAxeHQNYqJxo/9BNEXnKxtuEyU5YFA6y/mJURbZFkWM2fOJDExkZSUFLKzs8OO5+bmMm7cOGJiYoiLiyMzM5OjR6uXvs7OzqZfv3489/RiLuzaFbfbjVKK4cOH87NHnwRg+//t5IILLiA1NTVUDkHTNKZMmYIWDDv+53/+RN8rR+O68DtcPeRqVq1aFdYPTdNYtmwZEyZMICoqip49e7J+/fp67+uLL75g586dLF26lEGDBtGrVy+ee+45ysrKeOWVV5rpt3d+a/C7+lVXXcX//M//kJeXxz333MOrr77KBRdcgGVZ5OTkUFr67YWUhBBCtJyjh78K/Tm28wWhuhUHVCd8yv7yX3QiD3d0MJMQnI9Rn6oCd5pZYWchgGiPPRm8QMXRs6M9gbtqZSiz0q6PYbgtUN5QDQwhROuycuVKoqOj2bVrF/Pnz2fu3Lnk5OQA1U/9CwsL2bp1Kzk5ORw8eJCJEyeGtXHgwAH+/Oc1/M+Sp9m9a1foZ7Xg8M0rBg3g448/Zt++feTl5fHuu+8SERHB0KFD0TWdv//978z8+Vzuv+8OPt30Orf96GbuuOMONm/eHHadOXPmkJmZyd69exk7diyTJk2isLCwzvvyeu05ZFUro4I99MrlcrFt27bm+eWd5xr9rh4VFcWUKVPYtm0bn3zyCQ899BBPPPEEHTp04IYbbjgTfRRCCNEMivO+AUA3PRixiVjFdsbiqGrH8WC9ibLiPKLiogBQWuQph1aUV9iBgqbKMJwKC53YYHG8QmK5tFP4cChVYT+AcgTnWGBJtSohGkophWlWhG1KecK2mseba2vsEKuMjAxmz55Nz549ue222xg4cCCbNm0CYOPGjezdu5eXX36ZAQMGcMUVV7Bq1Sq2bt3K7t27Q234fD6eXvAUffuk0ye9N5qmhfphKQ2Xy02HDh3o1KkTTqeTu+66iylTpjBlyhQMTeP5559n0q03ceeUSVzcoyv3330HE37wAxYsWBDW16ysLG655RYuuugi5s2bR3l5OR988EGd93XJJZfQtWtXZs2axYkTJ/D5fDzxxBMcOXKEvLy8On+mrWnSu3qvXr2YP38+jz/+OBs2bGD58uXN1S8hhBDNrOJYIdAVw/SA4QhV3c4ngXyVQCetEE/JNyQmJcBBUHoElYFKopxRdbZXXpWBcNgBQykx9AgGFsdVPIOSo4lyGaGMhV5ZDDHBjAUVKAkshGgwy6pky9a+pzzn/7afmWsPH/YJhlH3+0BdMjIywl6npqaSn28vZ71//37S0tJIS0sLHU9PTychIYH9+/czaNAgALp26UJS+0QArEDVfC47sDi56rZlWdx444106dKFxYsXA6BrBgcOHODOrFtQwUfoDkyuumowzyx5ut6+RkdHExsbG+prTU6nkzVr1nDnnXeSmJiIYRiMHDmS733vew3+3bR2zfKubhgG48ePD1XnFkIIce7xF9kBgG7ZAUFEuf3heUKL4ZhKACBQcpTEjl0BMI1ISrwl9QYWFZ6qwKIMgFIrkfb+XMCuY9EhNoIOsW6
Ol9gZC2flCYixf1Z3lAHO5r1BIcQ5wekM//+2pmlYwcrXSqmwydZVau6Piqp+37HMQOgcAFPTsZR97oMPPkhubi67d+/G4bC/1hqhgnlaaPEoAwvTNGtd+1R9rcuAAQPYs2cPxcXF+Hw+kpOTueKKKxg4cGC9P9OWyOMiIYRoI8wye3ywruyAIM5jLy8biIRjFfaXf7PoCPGXXgVUoHQHR48fJyUmpc72KiuD7bnswKJMtae9fy8ABcSTHOumQ2wEx4vtjIW7ssAe/qQHMFxlKMuBZVrohsy1EOLb6Hokw4d9Enrt9/v55JNPws7p27dvrS/KzXXt5pKenk5ubi6HDx8OZS327dtHcXExvXv3Dp138vArM7SMblXGwsBS8MILL7BmzRp27NhB+/btq/trGFx00UXs2PUPMm8dC9gZix07doRdoyni4+33zC+++IJ//OMfPPbYY83S7vlOAgshhGgjVGWwPoXyoJSivd8OLEy34oQ3FgBH6RHiOqWC+gI0nfyvjsCFl9bZnscTDCycdqDiNROItOyVnwpULB1i3XSIc4fmWER5jqOrSCxK0V1lmB57ZSlXpAQWQnwbTdPChiNZlh9NC1+5zTCiMIxzOxM4cuRIMjIymDRpEosWLSIQCDB16lSGDRtW46l/dWBRK2OBzuat7/Pb3/6Wp59+mqSkJI4cseeMRUZG4jQM7rvvPu69914y+vXkhu/0Z13O+6x/cx0bN25sUv9ff/11kpOT6dKlC5988gnTpk1j/PjxXHvttU1qt7WQd3MhhGgrvMFlZDUPpQGT5IC9gpMW4aMkWN8isvwYRmySPQ8DKDrFhESPJ1ih22Wfq0y7jQrlppKIUMaiao5FvPcEphacb+EsB8Dva/ra8kKI84emaaxbt4527doxdOhQRo4cSffu3Vm9enXYeSfPFzfNqveJ4ORtTeOD3bsxTZP777+f1NTU0DZt2jQMw2DMmDE89lg2zyxZTp//dxPL/vfPLH7uDwwfPrxJ/c/Ly2Py5MlccsklPPDAA0yePFmWmj2JZCyEEKKN0PwGOEDTfBwrK6IHdsbBEeml0hkLXoj3FoAzCsOswHREUXm87mUXAbwVHkBHd9ntaAF7ydpCYnHoGolRLjrGuTkQXHEq2XeCg8ZFRKuvcbgq8AN+rwQWQrQmW7ZsqbVv3bp1Ya+7dOnCm2++WW8b2dnZPHD3j/EFF4iwgkOhNqx9mUSziAJN56GHfsbDD88kJaX2UE1/sGbObbfdxj33jSOhzH59JCq8wHNdq10VFRXV2y+ABx54gAceeOCU57RlkrEQQoi2wrTrU+iGn+IT9tKzpSqSqMhyAm77AzbRX4hF9QRv74n6axT5y4MBhduuZ+EI2BmR4yqOpBg3uq7RIc5NQXAoVJL/BOWOVACMYJajokblYCGEgOpgAsAyTZSy0IPVs1VwRrZhGHX+rOEwQIFSGjpWqE63eVKb4syQwEIIIdoI3QoGFs4A5cV2YHFMxZPgLkFz20/0OpiFFPkD6Mr+wm+Weuttz19RNRTK/q/Lb3/oFyh74jZAx9gIjgUDi/a+YsodHYM/Y7dbXl7RfDcohGgVlFK1ggArYGKoYGBRtdJTPYGFpmvoVK/+ZGKfpywJLM40CSyEEKKN0JQ90dNwgTdYHO8YCSRGeHFE2l/03QQ4XnocjeC8CU/9Q5UCHjtTobvtD2uX326jMDhxG6BDnJtCghPDMQkoe/Kp4fahlCUZCyFELcqyQsOU9GDwYJomOsH3I+3UGQtCgYUGGJha8OuuBBZnnAQWQgjRZtiBhTPSwFtkT8o+phJIikwgJrKC4uCX/pITeaHAgvoTFpiegD08wW1/2Ef57OxD1VKzAMmxEQRwcEIFC1j47Y8dh9sC5aWiUgILIUS4qmyFbhgYweVzLTOAg/D6EvVmLDQNvSqYQA8FFpolc7rONJm8LYQQbYVmr9oUEROBKrYDiwItFs8XB4mN68AxlUC8VkFF8TdoenD+hL+
eJ4KA5VWgfBgu+8M62mfXsyg4KWMRF+EgwqlToOJop5Wh+UwwwHCbKOWlUgILIU6LruskJyfX2tcaVM2vMBwODMOBP7jPpUxCiQhOkbEA9Kpn50rDCmY4dBWot0CfaB4SWAghRBuhgmvex7SLxVu2B4BiRxSucgeRkWUcUwlcxDd4io+gG/YwJ82sf018y1cVWFgoINZXDECBiqN/MLDQNM0uklcWz0V8g+6pgGg7sEB58XhPkRIRQtTLMAy6du3a0t04I8xg3Qrd4UB32MGDPxAgMpix0ILLzp4qkNJ1HSx7Ajc6YIKBiWkpHIYEFmdK6whthRBCnJIn4MEKVs+NT25PTOVxAMocbvzlDiJ9leSTAIDvRB66w/5g1yx3vW2qACjlxXCZVBJFor8IgELiSI514zlwArPUR8c4N8eDtSwcHrt+RVXGwlMugYUQIlzVUCjDcKAb9jNwvxmomlqBCgYWp8pYGMHhT0ppWFUZDiwCVu0lZkXzafHA4rnnnqNbt25EREQwYMAA3n///XrPzcvL49Zbb6VXr17ous706dPrPG/NmjWkp6fjdrtJT0/njTfeOEO9F0KI88OxsmJMhx1YJHVOI8FnV932uXT8FU5UhUahZk+y1kqO4HDZH75VE77rovw6KC+G26KEOJJ8RYC93Gy7sgD5y/ZS+Pq/7YxFcGUot8fOathzLDz4yivPyP0KIc5fVUOhdIcDw2EHFqZpZytMVT0W6pRDoYLZDEtpqGBE4sCUwOIMa9HAYvXq1UyfPp1f/epXfPzxx1x99dV873vfIzc3t87zvV4vycnJ/OpXv+Kyyy6r85wdO3YwceJEJk+ezD//+U8mT55MZmYmu3btOpO3IoQQ57QjeYdRuj2sKT6tE0kBu/CdigjgL3fgL3dSHAw83GX5OCKqPrBPEViYOio4FKpUxdK+KmOh4tDyC1nlfo8tuR+QHOsOVd+O9NjnGC47Y+GXjIUQogYzYA/FNByO0KpQVnCpWVMzUEpH1/VTzpUw9KolZgEtmOHAImBa9f6MaLoWDSwWLlzInXfeyY9//GN69+7NokWLSEtLY+nSpXWef+GFF7J48WJuu+024uPj6zxn0aJFjBo1ilmzZnHJJZcwa9YsrrnmGhYtWnQG70QIIc5tBf/9MvTn8oT2tLdKAHBGVAYDCwcVDrvORYznOBExwSFQWv2BBZYByoPhMvGYUbiV/WWggDjKjuXh10wOqSMkuwyOB6tvx3gKUYDhtoBKApWy/KMQrcnw4cPrHVHSUFbADiJ0R/VQKKUsht90FzMenY9S2imzFQCGcXLGwt4nGYszr8UCC5/Px4cffsi1114btv/aa69l+/btp93ujh07arU5evToJrUphBDnu9Kj+QAYgUqKK09gYGEqDVekB3+FA1+5A6/b/khI9BUQ1c4eFqW0yNB68rUoB5qjHE0H5bcDkTIVgTsiiopSe8hTheYjutIXylgk+orwBrMgurMEyyOBhRCimlIK06zOWFQNhdJQrP2fBfxq5rQGBRa6Eay+jVYjY9G0wGLt2rWMHj2apKQkNE1jz549tc7xer389Kc/JSkpiejoaG644Qa++uqrJl33fNFigcXx48cxTZOOHTuG7e/YsSNHjhw57XaPHDnS6Da9Xi8lJSVhmxBCtCaVBScA0K1KSk7YVbcLiCdGL0OZOv5yJ1qw0F1SoJCopCQALCMSr1nPcCXlQHeW2n/22R/+BSqODnERFFdUv49qJ4pCcyyS/CcoNtsBYDjLML0yLEGI0xEIBPj888/DtkDg/A/UlWWhrKrieA40XScQCKCjSGwXT1RsLErp3xpYhIrkKQ1Ns99nHJiYVtPec8rLyxkyZAhPPPFEvedMnz6dN954g1dffZVt27ZRVlbG9ddfj2m2/joaLT55u+b4uOZYX7ixbT7++OPEx8eHtrS0tCZdXwghzjX+Ert4nW5VUlFsBxbHVDzRpr1Kk7/CgSPSnkidqEoxOtqBhWlEcKK
yuJ5WnehOu3aFw2d/yBcSR4dYN8XBmhYAVklBaChUsu8EpVZ7uy/OMixfM96kEG2IUorS0tKwrd7s4llmWRYzZ84kMTGRlJQUsrOzw47n5uYybtw4YmJiiIuLIzMzk6NHjwL2ilALFj/NyO+PY8WKFfTo0YMuvfugYQ+F+vmj8wCN7du3o2larS0rKwsATdd46aWX+M53hpOSnEGvqyfwv2v+ElrKFuzvi8uWLWPChAlERUXRs2dP1q9ff8p7mzx5Mo8++igjR46s83hxcTF//OMf+d3vfsfIkSPp378///u//8snn3zCxo0bT/t3er5oscAiKSkJwzBqZRLy8/NrZRwaIyUlpdFtzpo1i+Li4tB2+PDh076+EEKci6zyYF0K5aGysCqwSCDSX4k7KhrLp+NwefCr4FPAdlVzLAyO5h2t1Z6yFGguDKcdsDjt5jmu4kiKdFKqqld78pcdC2UsoiwPHisBAMNVTnBahhDiWyilKDfN0FZhWngUYVuFaYWd01xbYwOWlStXEh0dza5du5g/fz5z584lJycndB/jx4+nsLCQrVu3kpOTw8GDB5k4cSJgV9gGOPTf//Laa6+xZs0atrz9Fnqo6raGUhqDBw8mLy8vtL377rtEREQwdOhQAN7Y8Ca/nv0I9957Fzt2ruWuH93EHTPm8H9bN4f1dc6cOWRmZrJ3717Gjh3LpEmTKCwsPO2/pw8//BC/3x82LL9Tp05ceumlbWJYfosVyHO5XAwYMICcnBwmTJgQ2p+Tk8O4ceNOu92rrrqKnJwcHnzwwdC+d955h8GDB9f7M263G7e7/rXahRDifGdVBocW4MF/wn74clyLg8oyOna/iK8/+xcuq4LjxJNKIT7Nh2YZKN3gaO5huDgjrL2A3/6Q14OBRYTfTvEXqDgSDZ1KrToVUeo7QcDRmUrlIlLzQSAS3GC4KvF7pFCVEA1RYVn0eO+TGntrfI3bsf+MXPvg0L5Ef9vQo5NkZGQwe/ZsAHr27MkzzzzDpk2bGDVqFBs3bmTv3r0cOnQoNEJk1apV9OnTh927d9On18UA+P1+Vq1aRXJyMkVH8sCyh13aE7E1IiIiQgv5FBQUcNdddzFlyhSmTJkCwMJFv+OWH97M7bffTnR0EX3vvZAPPtrL/yx9hlturP6emZWVxS233ALAvHnzWLJkCR988AFjxow5rd/VkSNHcLlctGvXLmx/U4f6ny9adCjUjBkzWLZsGcuXL2f//v08+OCD5Obmcu+99wJ2JuG2224L+5k9e/awZ88eysrKOHbsGHv27GHfvn2h49OmTeOdd97hySef5LPPPuPJJ59k48aNTV6hQAghzmv+4Bd4zYtRamcgiowY/OUO4jt0JDYpmahABceCmQVPxQkM0846lBzJq9VcwGcHElUZi6iAHUgUEkeU3/6zHvyIKdUrSY50UYA9gVsP2KtPGa5KlNniI3KFEM0sIyP8QURqair5+fYCEvv37yctLS1s2Hl6ejoJCQns378/VMMirXNnkpOTgWA9C8LnRlTNsfD7/dx444106dKFxYsXh47v/+wzrhx0hV15G3uZ2iGDLuPAF/+ut6/R0dHExsaG+tqcmmOo//mgxTIWABMnTqSgoIC5c+eSl5fHpZdeyt/+9rdQifq8vLxaNS369+8f+vOHH37Iyy+/TNeuXfnyyy8BGDx4MK+++iq//vWveeSRR+jRowerV6/miiuuOGv3JYQQ5xot4AAnaLqXyAr7Q7PU6cZR4SS2ezJxyR0J+PaQrxIAqCw6gm51AmKoPF5Uqz2PJ7hqi9MOPmL8doBRoOKI81RSAbRzx+Hxeym3KonXFcdVPJ214ziCwYTu8oLVoh9DQpw3onSdg0P7hl4H/AE++SQ8g9G3b18czub//1SU3rgHAE6nM+y1pmlYwUnT9X3BrtpfVXU7Ojo6dEw3DPRgHQuqqmgHA4v77ruP3Nxcdu/ejcMRfu861cX0TE0LXgMsS6Hr2rf29XSkpKT
g8/k4ceJEWNYiPz//lKNnWosWf0efOnUqU6dOrfPYihUrau1ryDi/m266iZtuuqmpXRNCiFZDM13gBN0IEOe1q25XOp3EFDmIS+pAXNIxCso1CjQ7q2CeyEO37A9FX0l5rfYqKoOTwV0eAGKDk7ULVBztK+3zE6JisQLRHCr9mijTx/HgkrPO4LAph1sCCyEaStO0sOFIfssiosb38yhDx9mIIUstIT09ndzcXA4fPhzKWuzbt4/i4mJ69+4dCiw4KfgwHI6T5lgQKo63cOFCVq9ezY4dO2jfvn3YdXr37s3O3R/w/czxgIal6Wz/x14uvugiApaFSz8zv6cBAwbgdDrJyckhMzMTsB+Uf/rpp8yfP/+MXPNcIu/oQgjRBmjKnkemOy0S/fbERDPCxF/uIC65A3HH8zmS76DMiAYFjtKj6KobAFZ57aWbyivtTIXh9OHDRYrfXjmqgDh0r73UbEJ8Ai7dyaHSr3EFKkITuN3BoVKGyx8apiCEaBtGjhxJRkYGkyZNYtGiRQQCAaZOncqwYcMYOHAgxw//FwglJuw/6waOkwILwzDYuHEjM2fO5NlnnyUpKSk0fyEyMpL4+HgefvhhMjMz6dX3Eq69NoO//+V91v79XV5/5SUClsJ1mv0vLCwkNzeXb76xF8H4/PPPATtTkZKSQnx8PHfeeScPPfQQ7du3JzExkZ/97Gf07du33pWkWhMZ3CqEEG2ApuyidCrKIMm0a1o4Irz4KxzEJSUTn9wRf7mDEqd9XlTlccDORqjK2sMCKirtY4bLRwlxtPcXAVCo4iAYWLRLSqRTl04A6IGy0ByLyOCwKcNtgZI6FkK0JZqmsW7dOtq1a8fQoUMZOXIk3bt3Z/Xq1XZxvDoyFpauYwRrUWiawjAMtm3bhmma3HvvvaSmpoa2adOmATB+/Hh+99h8nn/+ea68chwrXlrNiwtnM2zwd5pUJG/9+vX079+f6667DoCbb76Z/v378/zzz4fO+f3vf8/48ePJzMxkyJAhREVFsWHDhm+vvdEKSMZCCCHaBDtgCMQ5ibXsbIM7ohzT4yImMYm4pA74y514kw3wQbz3OJpmF8ZTvtpZhfLKSpRS6K4ARao97f1fAFBiJODzHwEdElOSSLkgFbYAlIcyFjHBwMNwmyjMsPHOQojz25YtW2rtW7duXdjrLl268Oabb9Y6zzJNlGXxs2kPMH9R9URsMzgMfvPr/0OBOwbDMMjOzq5VH6Ome6fczU0/yiQiqhhlOUj0llGsTAInzfeoqaio6JRtZmVlhWpl1CciIoIlS5awZMmSU57XGknGQggh2gClRQJgJNhf4MuVG5fmIbpdewyHg7gOHfCXOzDd9gdue38hmm4PWdICtZ9BeSoqAT+G28QbiMIZnFhpRLWnTLMDl/YXJNMupT1unERqPgqCgUWctwgLDcNtgvKGVpgSQrRtVdkK3TDQT5owblr2e4Sp6SilNfjJv6Zr6Ng/UzXq0oHVpIyFODUJLIQQog2oCiycUfbrYyqBaLOcuPb2co4x7doT8LpwuO0hTslWEX63/WGumbVHI3vKKkB5MVwWym8fL1FRxLoj8Gn2l4OExHZomkaSO4Eozc+xYPXtJP8JKojG4bZQyoPfK4GFEKK6OJ5eY3Wnqv0mOqA3fEiRrqGjoZQOmh1MGJgELAkszhQJLIQQopWr8HmwjGDGwmV/iT9GPJG+SuKSOwD2E8LYxGQMww4sIvDhj7U/fDWrdmDhK61EKa/dnt/+ElCgYokPfnhH6tWFRzu2Sw5mLOw5Fu19Jyghzv5Z5cVT6T1Tty6EOI9UZSwMo77AwkApLSybcUpVgQVaKLBwYErG4gySwEIIIVq5vIKjmFWBhbKHKR0nHjwacUnJofPikjvgVh5KlH2uHmN/mGvB+Rkn81d4QPkwXCZGsPheAfFEW3Z9izh3TOjc1E6puDE5URVYBEoos2Ix3BZKVVIaXLpWCNG2VRXHM2pkLKgKLE5zKBRKOyl
jYYXmWIjmJ4GFEEK0ckcPH0QF12x3BJeFLTRiQ0vNVolL6hCsvp0AgBHlDx6pK7DwBTMWFk6f/YFdqGKJCNgZj4TouNC5nXqkoWngxYUZHOjs9wcDHWcp5RJYCCE4aY5FzcBC2fut4LCmxg+F0tBCq0qBCgYqovlJYCGEEK3cicOH7T8oC3flcQBKHJEEyh3EhmUsOhLp9XCMBAAMl/3hWzU/42QBTwBNq0R3KtzBgnfHVRwuv50RSUhICJ2b0rMzhtJxayaFwSVnlcceXqU7S6gI1sQQQjScpmm0a9cubKurovX5xKwnY6EFJ29bmp19aOh9arqGruzAwsAKztEAZUlgcabIcrNCCNHKlR07DqRhmB5igoFFpdOJXmBX3a4Sl5wMB+F48Mu/Uw8uN1tXYFFpojlKAYgKFrwrIB7dXw46tEtODJ1ruBy0d8QR6fdzXMWRrBWj+xzBY+WhmhhCiIZzOBz06NGjpbvRrEKTt2vMsdBVVWBR9T8NpBNcFcqeaRHQDAxloVkmSqnzPhA7F0nGQgghWjlfkT38SbcqaOcvAMDv1ghUOMPmWFQVyTth2PMjIq1yACwjAq8ZPsHa9FkYTjuwiPbbxwpVLLqyfyYxNTns/A4x7YnSfKFaFo7gvAzdVYanpLz5blYIcV46uThezYxFVWChNA0aM+86OBSqqo63qdlfe2VlqDNHAgshhGjl/CX2F3/N8tA+YFfdVm4/uorHFRkVOi8uqQP+CiclDntftGkHDqYRQX5xYViblk+hO+2AIM5n/7dAxaO0MgDadwoPLFKSOxKp+UPVt6uGTxmuCjylMsdCiLZOWRYqOKm65hwLg+CS1Bqhcxri5DoWEBxKBRiaBBZnigQWQgjRylmV9lNAv+EhSRUB4HKXE52QGnZebFIS/nInFU77Qz3BHwwmNJ28r74Kb9OvoTvLCWDQzmcHIIXE4tLsYVHxCfFh53fq2pko/KGMRVTAnhhuuDz4KmQolBCtxfDhw5k+fXqjf666OJ5eazlZQ9nBxLjxk3h0djZWQ4MLXbNzFcpuzwoOfXJgYZqyMtSZIIGFEEK0cio4ismMLsOBhaU03HoFcUkdw84zHE5crmQCbvvDN8F/Ai24fGxhbm54o6aG4aygjFiS/HYWxGMkoGuKGCMSR40njp0uTiNK84eqb0f77WDCcHkIlPua9X6FEOef6vkVzvD9SoUyFitXLmHmjOlYZsOKamqaBpqGpuxVpKoCi6YMhVq7di2jR48mKSkJTdPYs2dPrXP+8Ic/MHz4cOLi4tA0jaKiotO61vlIAgshhGjlNH9wacao6sxCpOkJm19RJTahE8ppBxNJZiGaaa/YVJafH36iqaM7KylRsSQGSuxdRiwA8RGxtdp1J8fQEUdoYnic3x4+Zbg8BColsBCiratvfoVpKlRwHle7xHhioqND9S4aRNfQ68hY+E+zSF55eTlDhgzhiSeeqPeciooKxowZwy9/+cvTusb5TAILIYRo7QL2B7UWYX+ZP6YSiPRW1hlYxCV3wKnbwUSSKsHvtOc/VBYWhZ9oOTBclfgDkRjYQwoC2HMz4mPCh0GBPda5S0R0aChUgr8EEx3D7cNXIUs/CtFYgUCAgwcPhm2BxnzhPoMsy2LmzJkkJiaSkpJCdnZ22PHc3FzGjRtHTEwMcXFxZGZmcuSbbwA7sMjOzqZfv34sX76ci3v2ILL7FSiluOH7k3l07mNYZoAtW7agaVqtLSsrK3SdpUuX0vuqvlzQLY0BA25g9evrgGD1bctC0zSWLVvGhAkTiIqKomfPnqxfv/6U9zZ58mQeffRRRo4cWe8506dP5xe/+AVXXnnlaf3+zmcSWAghRCunWW4ADKcdMBwjHjyEFcerEpfcAYfyEAg+4SOiCAB/SY1aE8qB4fJi+e22i1Q0zuDwhHaJCXX2o1tiEgXKzmYkewspw66+bVZ46zxfCFFNKUWFLxDayr1+8o4Vhm3lXn/YOc21KdW4p/srV64
kOjqaXbt2MX/+fObOnUtOTk7oPsaPH09hYSFbt24lJyeHgwcPctuddwLVE7cPHDjAa6+9xso//Yk977wSfHwRXN0pYDJ48GDy8vJC27vvvsv/Z++84+So6////EzZvnt7vV8uvVeSEBJCkd6UooAiEmlSRBAsX/UngthFpUmzAIIiKlV6b6GlF9LrJdf79t0pn98fs7fJkUtIw1D2+XjMze7sZz7zmd3bnXl93s3j8XDYYYcB8Oijj3LllVdy1aXfYu5LbzBnzpe44oof8MrceahYWFmLxfXXX8+ZZ57J0qVLOfHEEznnnHPo6uqfrCLP7pOvY5EnT548n3akc/PvVhyLhVN12+pXw6KPUEkZnvYUnYQopwfh7YUUWAnjAy11FD0D2XoUnTKEOxvMUVRaPOAwqmoqSWxuBaDE7KFX1lLo6sRM54VFnjwfRtKwGHPtc7tu9NjLH8mxV/z0OHyu3b9lnDBhAj/5yU8AGD58OLfddhsvvfQSxxxzDC+++CJLly5l48aN1NbWAnD//fczduxYFi9dyuFHHwNAJpPh/vvvR3W7KIp5MdhWbdu2TFwuFxUVFQB0dnZy0UUXcf7553P++ecDcOONNzJnzhwuvfAbxFMJho2fw3vzlnPjnffz8MyZuRiLOXPm8OUvfxmAX/ziF9x666289957HH/88fv4rn02yVss8uTJk+dTjpBOgbsAjrDoVX0YCb1f1e0+CkrL8GaStMkwAIrbiZ+QqQ/MWAod1ZVGNZzLSCchNOm4TRVVlQw4juqhtSSyxfZc0iRt+tHcFnbmg6IlT548n2QmTJjQ73llZSVt2TitlStXUltbmxMVAGPGjKEgFGLN+vW54niDBg2itLQ0F0/hVM3Opo21trl8GYbBGWecQV1dHTfffHNu+8qVK5k1axYoAlU6RfKmz5jMynUbc65QHxyr3+8nGAzmxppnz8lbLPLkyZPnU47EA0BIOiIhpntwpT34CsI7tA2WliES0IETC6FqMScfS6b/PJSNjuoy0Q1HcHTKEIpw3KU+WBwv1/egYjwoRKSXkEhiG25Ut4U0Ph5+4XnyfJzx6iorfnpc7rlhGCxbtrxfm/Hjx6Hr+gd33S/H3hM+OAYhRC5F7M4qXkspEYhc8Lbf73e229uERd9e1naxJJdeeikNDQ3Mmzdvh2x0QohtRfKkgOyxVWzMrCvUrsaaZ8/JC4s8efLk+bSjOFaCQtupwJ1yqQTdFQNe3EOlZRhxnS5fACR4lBhxQJjbLhe2LUEIVJeFK1vorkuG8CoZFBSCwR2zQgGofp0iRaFDFhASSchoqD4bdjN1ZJ48n2WEEP3ckQwh8Wj9v8M+l4auf7xv7caMGUNDQwNbtmzJWS2WL1tGJBpl+LChOxTHI2udsIUCos9i4fxm/P73v+ehhx7i7bffpri4vwvm6NGjefPNNznntLNzRfLmvbeQ0cPqEWKbYMmzf/l4//flyZMnT559QkqJrTgWixI7W3XbY+H11w3YXne5UUQhPaoLTAgQdRyobFeujZmxQKZRXRa+bA2KDkJ4MQhqvh2KW21PhcdNpxFiCC2ohoLqspEyn242T57PCkcffTQTJkzgnHPO4aabbsI0TS699FIOmT6dKZMm7vD7IaQjIiyhIPpcoUyTF198ke9973v88Y9/pKSkhJaWFgC8Xi8FBQV897vf5cwzz2TS2IkcPmUmj7z6L57874s89887AVDk3k1odHV10dDQQFM2i9Xq1asBqKioyMV8tLS00NLSwrp16wBYtmwZwWCQuro6ioqK9uq4nxTyMRZ58uTJ8ykmkkhgKz4kUGL3AKBqKULFlTvdx+utIq47QiIsHStHX2YpgFQqg5QZVLdNIOMUuuuUIXzCoMA7sLWij7qCYC7lbJ8blaIl9urc8uTJ88lDCMFjjz1GYWEhhx12GEcffTSD6wdx1y037VAcD7YJC1sIENl6FLbNG2+8gWVZXHLJJVRWVuaWK6+8EoBTTz2Vm2+
+md/d/HsmH3Uw9933ELff/lMOneWkgNXYO2HxxBNPMHnyZE466SQAzj77bCZPnsydd96Za3PnnXcyefJkLrroIgAOO+wwJk+e/KGpbD8N5C0WefLkyfMpprlpHabmIePOUIBzA68rcUJFA8dBAARCNWxlFSShKOs+RTZOAyCWTOQsFsG002cvQcLYhEM71rDYnmHVpXS2OUXy3OlsQSxXam9PL0+ePB8zXn311R22PfbYY/2e19XV8fjjj+eeJyK9RNrbUDUnluO6667L1b7osyxIAS+88Cy9zS1IW/Lj//cjrr/++l2O5dJLL+Ub51+E2Z4k6u5Ed0exEgpIULFp7U1SFvL02+fDqmTPmTOnX62Mgdh+/J818haLPHny5PkU09GwAYSC9GbjK6SOx0wMmGq2j1BpBZbmWBOKs1YOstmcABLJJFKmwSUpNJxq3nGCCAGFH2LmHzKkLBcY7jOcbFCKK2+xyJPns0xf5qcd4isAtc+yIEBV9VzWKNvcTYuD4rhPiWxtHks44kUTFkkjH9+1v8kLizx58uT5FNPb7PgBozsFn9plGK+ZGrDqdh+h0jIUHCtCmezGVCRyO2ERi8dRtChJ4afYzAqWrEWjsHzgGhZ9VNUX5FyhghlHUCh5i0WePJ9p+rI8qQMJi75YCEWiaXqujWXtXvB1X5KKPmFhZ5+r2CQzeWGxv8kLizx58uT5FBPvcAK2Va0DgHZRAAkxYNXtPkIlZWjZYndeMhjuNFLZ5i6Q7I2iaFFiMkCh5aSwTUrn9eLqnQsWgJICL53ScYUqyDh1NTR30sk0lSdPns8kfSJB/UCMhW1L1GzNbYRECGU7i8VuZnXqs1jYjqWiT1hoWGQsG9PKp5bdn+SFRZ48efJ8islEYgC4FEdgdIgQRkIjUDRwETtwiuS5jBTRbGE94e3GUrxkTCd7UzIaQ9FjZAwvChJbClLCudgXle3aYqEogrTiWCyKMz1k0FH0pJNpKk+ePJ9JduYKZW4nLPqyY6tqViDsZppqoQgQAiVbuVtmO9KF02/eHWr/khcWefLkyfMpxow5cQw+0QNAjxZAFYUDuhz0ESotw5NK0p51WVJcPdiah6ZOpxptOppAccWwDcdK0U0At7DQUfH5fB8+KLcjPkrMHqKEUF0pkqn03p5injx5PuHszBXKtO1c9qY+YdEnPqzdtVgAKKDIPouFs0nLC4uPhLywyJMnT55PMXbSuXiG6AEgqnnwuCt2uY/L68Ntu3NB1orm7NuywcnJno4lUbU40nAu8F3SqWERcgUGLLr3QTwFTqrbkJ0gbgdRXSliiXwAd548n0Vsy0JmK10rWv8K36ZhbhMU2VSzOVeo3YyxAMdqoWRdofrKd/cFhefjLPYv+XSzefLkyfMpRmYECCiUPQAkXToVntoP3c/lKqcje711i04soLtxq9NHLIXqSiIzzoW+kxBeYVDgC+3WmApKSkl3ariFiZHx4Hf3Es8Lizx59gghxA5V7ndH2H/c6BMIiqKgKB8UFo77pYVAZAvn9aWk3V1XKKdz4VTfBhBOPFdOWOQtFvuVvLDIkydPnk8zpgo6FGWFheGCQPjDhYXXV0VPJg4WBOikF4h3OAHgmUQGRU+iGFlhIYN4RYZwwa5rWPRRURqgc3WIKrqQhgvVbRKN9H74jnny5MmhaRojR4480MPYZ6xdpJrtC+q2UFGycVx9Fos9c4USKAiQyjaLRTbbVMa0HZcrJe/Esz/Iv4t58uTJ82nGdCpo91XdRjcoKNm1KxRAoKCeqO7EUIRwAr9T3U7NCjNhoLpSuLIuBJ2yAJ8wKCzedQ2LPsoKPLnMUEpGRXVZRNs6dvuU8uTJ8+lhV6lmZU5YKIg+YZG1WEjbxrZ3M4A7a8mRUkA2tkLFxqU621N5d6j9Rl5Y5MmTJ8+nGCHdWIqkNBtjocskodJdp4QFKCiuIak7F/I+NyojmgTASpkoegpPtnJ2F0HcmBRV7DojVB/lIXeuloVmgOa
xiHd27slp5cmT52PKEUccwVVXXbXb7XdVHE/aWWEhFBRF44gjjuDqq6/JuUXtdpE8tc9FTEEIiZm9/S3QnP7z7lD7jwMuLG6//XYGDx6Mx+PhoIMO4o033thl+9dee42DDjoIj8fDkCFDuPPOO/u9fu+99yKE2GFJpfIFmPLkyfMZRLqxPFFcWX9il4ztsup2H6GyMjLZ/O991bfthNOHzFiorgz+tPO72ksQRUBx9Yf3C1AW9NCZDQx3ZWxUt02yJ+8KlSfPZ5FtNSx2FBYi665kCwVV1XjkkUe44YYbcm13N4BbZH/LkAoqFgnVSaUdEM5vWDKz+7UsHnnkEY477jhKSkoQQrB48eJ+r3d1dXHFFVcwcuRIfD4fdXV1fOtb36K397PxG3dAhcVDDz3EVVddxY9+9CMWLVrE7NmzOeGEE2hoaBiw/caNGznxxBOZPXs2ixYt4oc//CHf+ta3ePjhh/u1C4VCNDc391s8Hs+AfebJkyfPpxsPltspYtclA3gzu6663UeopAwU56JdavcgAduJo0SaoLoyhLKVsyPCSTFbWLKbrlDBbRYLb8ZAdVskI5E9Oak8efJ8StiVxaJPWKQNEyE0ioqKCAaDOXcoa3cDuEVffwoKdk5YuGznN2xPLBbxeJxZs2bxq1/9asDXm5qaaGpq4sYbb2TZsmXce++9PPvss1xwwQW7fYxPMgdUWPz+97/nggsu4MILL2T06NHcdNNN1NbWcscddwzY/s4776Suro6bbrqJ0aNHc+GFF3L++edz44039msnhKCioqLfkidPnjyfTXzg6gKgnTCa6cLl/fBaE6HSMlTTmc0rJoKhWYhMNmOLqSB0g0LTEQNx6cMjXLjd7t0aUXHATSdOjIUvnUbVbdLx2J6eWJ48eT6m2LbN9773PYqKiqioqOC6667r93pDQwNf+MIXCAQC1I0YxcVXXEnHdu6Q1113HZMmTeLBf/6TIYecQvmg8YCSc7NSVI233nkXXzC0g4fKnDlzcv3ccccdDB06FG9RgHGHT+Ghhx4HwMxaPDzlI3jkwb9x6Zwv4/P5GD58OE888cQuz+3cc8/l2muv5eijjx7w9XHjxvHwww9zyimnMHToUD73uc/x85//nP/+97+YexJw/gnlgAmLTCbDggULOPbYY/ttP/bYY3nrrbcG3Oftt9/eof1xxx3H/PnzMQwjty0WizFo0CBqamo4+eSTWbRo0S7Hkk6niUQi/ZY8efLk+TQgFQ9atg5FpwihazuvuL09Hn8At21gSYGCRHqjYOtOn5bEcmsUm45pP42bAndgt8ekKoKEy4nHCKWdGUPb+Gy4CeTJs9dICZl4brGSERrWrey3WMlIvzb7bZFyj4Z633334ff7effdd/nNb37DT3/6U1544YXsaUhOPfVUurq6eO211/jX3+5lU0MD5875er8+1q1bxxNPPM3Df/otr778OEJsS0WraipTp0xm7fJlOc+Ul19+GY/Hw2GHHQbAo48+ypVXXsk111zD0gWLufCc87nsiu/z+uvvgZBY2Vvgu//wa447+VTenr+QE088kXPOOYeurq59+aR2oLe3l1AohLaLwqSfFg7YGXZ0dGBZFuXl5f22l5eX09LSMuA+LS0tA7Y3TZOOjg4qKysZNWoU9957L+PHjycSiXDzzTcza9YslixZwvDhwwfs95e//CXXX3/9/jmxPHny5PmYYJomtuLFI5ysTl1qELe7/EP2chBCENZddFJAGT1Idw9YToYpLEi6fRRKx8qQkW4K/LtXw6IP21cMMSg0oiQA7LzFIk+eXWIk4BdVuacqUPe/OvYPm8Dl3+3mEyZM4Cc/+QkAw4cP57bbbuOll17imGOO4cUXX2Tp0qVs3LiR6uoq2sIhbr3xtxxxwonMmzePadOmAc4E9J9u+RWDS7x0uf0oyrZbVkXVcLlchAJBwuUVdHZ2ctFFF3H++edz/vnnA3DjjTcyZ84cLrvsMuyMxVUXf5N3lsz
l1lvv44FZRxLPukN95ewvcsKpX6SywMsvfvELbr31Vt577z2OP/74/fLWdXZ2csMNN/CNb3xjv/T3ceeAB29/sJiLlHKXBV4Gar/99hkzZvDVr36ViRMnMnv2bP71r38xYsQIbr311p32+YMf/IDe3t7csmXLlr09nTx58uT52NDd3Yaleglm08X2an58gZrd3r/AW0R7NshauroR0olVk1JiC8ftyZICS2iEw+E9GpsadAROidlDCg9SzRfIy5Pn08KECRP6Pa+srKStrQ2AlStXUltbS21tbS6r06iRIwiHw6xcuTK3T01tHeXFYQCEkDhSyqEvHsO2TAzD4IwzzqCuro6bb74512blypXMmjXL2T8bvD1j2jRWr96ASoaU5riETho9FICUYeH3+wkGg7mx7iuRSISTTjqJMWPG5ITWp50DZrEoKSlBVdUdrBNtbW07WCX6qKioGLC9pmkUFw+c5lBRFKZNm8batWt3Oha3273bvsF58uTJ80mhvWEtluYlLB2zftzlIlRQv0M7O2UiVIHQ+1e99fmr6RDZ7E1KN1AJZCd0hOMW1UUQj7B2O3A713e4HJqhyIoQlVXgMT58pzx5PsvoPsdykMUwDJYtW9avyfjx49F1/aM59p40/8AYhBDYtpN5afsJ5O1rWHxwYtnj8eWqYyMkQmybC1fVbPVt0+TSSy+loaGBefPm7eBqlOuvLyuUnY3FQCKzFhif6owrka1lsf1Y94VoNMrxxx9PIBDg0Ucf/Wg+l48hB8xi4XK5OOigg3I+d3288MILzJw5c8B9DjnkkB3aP//880ydOnWnH5iUksWLF1NZWbl/Bp4nT548nxA6GjcDUCQdi0VaVwgW93eesCJpmn81j/a/LM9ZgPsIhAfTrTqxEz7RBcKxWAhhIEznAt5XHK+o4sMzTW2Pr9BJqqFikzZ8CG8+j3yePLtECMcdabvF1rz9lg++vt+WXXiS7CljxoyhoaGBLVu2YJnOhMLaDRvo7e1l9OjRAEhbYiPRcYSHUEQ/0dFXffuPd93NQw89xBNPPLHDBPPo0aN58803s++ds7w7fwEjRw5BCImmO65QCjYeMqRNC8ves1iSnRGJRDj22GNxuVw88cQTn6nMpAc0iuTqq6/m3HPPZerUqRxyyCHcfffdNDQ0cMkllwCOi1JjYyN/+9vfALjkkku47bbbuPrqq7nooot4++23+ctf/sKDDz6Y6/P6669nxowZDB8+nEgkwi233MLixYv54x//eEDOMU+ePHkOFJGWVqCG4myBO0uzKSjrX2siubwTmTLJbIpgtibQK7b5URcUDWGV5gMLQnQREc6FWLiSiIwzL9Ulg3iFQXH1ngmL0gI/XTJAkYhhGR5U777PEH6WWPz802xaspATLr8at2/PZpPz5DmQHH300UyYMIFzzjmHX1x/HdHuLn740xs4/PDDmTp1KgBGxkLFRhGQEjrqB6bBFU3j9blzueHXv+HWW2+lpKQk59Hi9XopKCjgu9/9LmeeeSZTpkzhqKOO4tG//4fHn36Kxx+/GyEk29/qh5Q0KdtFajfSznZ1ddHQ0EBTk2M9Wr16NUAuC2k0GuXYY48lkUjwwAMP9EsKVFpamrO2fFo5oDEWZ511FjfddBM//elPmTRpEq+//jpPP/00gwYNAqC5ublfTYvBgwfz9NNP8+qrrzJp0iRuuOEGbrnlFs4444xcm56eHi6++GJGjx7NscceS2NjI6+//jrTp0//n59fnjx58hxIkt3Zi1lWWKgivUNxvOTKbSkeE0vb+71WUFZOTHXcRMOyG1vJzvC5Uqhp5/LRSQivyBAuCu/R2MqDnlwtCzJaXljsAelEgtfu/wvr57/DyjdeOdDDyZNnjxBC8Nhjj1FYWMjxJ5/CmV+bw+D6wTz00EO5NmnTQsH5TUhpLhSl/824oii8t2AhlmVx2WWXUVlZmVuuvPJKAE499VRuvvlmfvvb3zJ27Fj+/MA93P3725g9exo
IG7Gdu5O/r1DebgiLJ554gsmTJ3PSSScBcPbZZzN58uRcweYFCxbw7rvvsmzZMoYNG9ZvbJ+FGN4Dnvfqsssu47LLLhvwtXvvvXeHbYcffjgLFy7caX9/+MMf+MMf/rC/hpcnT548n1gykSSmlqEQJ+OS247jKwjnXrfTJukN29K8Jpd1EDpmUM7loKC0nEx2qrDY7ma96iWRTqK4Umgp56LcRYiw6tnjNIplITedsgBoRDUUhMsgnU7n4912g9Vvv46ZSQOw5t25TDrupAM8ojx5tvHqq6/usO2xxx7r97yuro7HH3+c7uZG0okEodIyfKFsoghbcslV3+c33z4bsLE1mUs1u33f//eda/jOt66gsKoa905q81x66aVceumlABhtCWzDIE0DAqe4Xluki9LoJkyZBCCZsejp6dnl+c2ZM6dfrYwPcsQRR+zgVvpZ4oBnhcqTJ0+ePB8NVtIg43WsFmmpEVL6B0em1vSQtjL82/sOL7mWY7YnMdu2ZWfyhgqwFOcCWSx7sFUXTVs3o7iS+NKOb3SPCBDy7H4Niz7KQx46skXy9IxE0zO0trbu9bl+llj+8rZYw60rlpOI5GuA5Plksn3wdh9G0kAliSpsDFRUNQNiR/chZbsA7t1CEQi57bbXtg10dwAbgYaFG2OPKnDnGZi8sMiTJ0+eTyl2Giy3E7jdThifK9zv9dTKTjaqbfTKOBuVVrpFnMTSjtzrQgj07FWiVPZgC2hatw5FT+DPODN8UeEnHCjY47EV+105Vyg9JdG0DM3NzXtxlp8tOrZspnndatyaj5qq0Uhps27e2wd6WHny7BW2taOwiMYNQsKZ4EhqXifuegBhoap9KWd3Twz0pZztu/W1bJOAppFQnWgLv0iRNizs/RTA/VklLyzy5MmT51OKzCio21Xd9rgrtr1mS1Kru9iobMvXvk5tJrmso18fgWzGPT9pTFeK9i1NKHqCsBEHIIafcGF4j8emqQpRvRAAXyqNpmVYtXIlhpFPO7srlr/6IgLBsfVzmOk+hWJ3FWvemXugh5Unzx5j2za25bhU9mV5krYkljEJOWUzsV1OUU7BABaLvloWe2CxcA6iZI9loimClOYkrAiKFJLdi7PIs3PywiJPnjx5Pq2YGm7FsVh0qqF+xfEyDRES8SRNandu2zq1BaMtjtEaz20rDYaJy2wxPG8PsbZuVD1J2IoCkMJNYenAdYQ+jLTb2S+YSaJpGTZu2sSdd975mQhw3Bss02DF6y9TFxiDzwoiEAwNTqJh+RKS0ciBHl6ePHtEnyAQirLNrSllYpDGJUxsBLrLuU0dyGLRt49l7Z6w6CuDIbK3vrad3c+VTalNNs4iLyz2ibywyJMnT55PK5YLn3CEQ4/qJxgenHsptbKLBrUdiaS0tBS3201cpGlWuvtZLYpDFbQTBsB2dZPpymC7DYptx68/I90UV+1Zqtnc8HwlAISNGEFN4nN56ezs5K9//SsvvPAC5u7ORH5G2LBwHulIjAlFh+W21QZGoUqV9fPfPYAjy5NnzxkoviKZMAhk3aASqg9BX9G6XVksdlMIZC0WfXEWUlpIKXFl4yx0LFwYpDJ5YbEv5IVFnjx58nxakR4KcKpuR3UP4ZJhuZeSK7e5QY0bN46xY8cCsFZpIbGdsAgVDaY9W31b1bqx44KMR6FAOhd/U+oUVZXs1fBcoXIAiswItpbm9NrZTJgwASklc+fO5a677srlis8Dy195gaGhifjUEBkN4thoQqfGP5I17+bdofJ8suizWPQJBCklkbSxzQ3KU4CUOxcWfTEWu2uxyAmLrFuVEBLDMPBpGknFibMIiBSJvMVin8gLizx58uw/VjwON0+CtS98aNM8/wOkh8Js1e2krhMuqwbA7EwSb+ulUXFEx5gxY5g4cSIAm9Q2kq1RjGx2qHDpSLqUIABetRMyHqTmXKANqSJQCYVCezW8QGElAKVWDzFVQV+f5rjamZx15ln4/X7a29v505/
+xCuvvPKZt17EujrZsngpY8IzAVjZa7A56QSZDg6MZ/PSxaTisQM5xDx59gjLcuKp+gSCTFkkpYFPpJGA11+4S2GhaFn3KcvcrfSuueBtu+/W1yaVSjlxFrqTrtZPirRh5wO494G8sMiTJ8/+oeFdePhC6N4IT38HzMyBHlEePJRkhYWpQqDIiWlIruyiQenAFo4bVGlpKXV1dYTDYQxhsVlpJ5ktlhcuq6FXdYIbA6ILYftRtGzVbYKUaC4UZe8uJcHiyuwoMySsINJO0P3wWgqfS3D+sV9hzJgxSCl57bXX+POf//yZTkf7/usvMzw0BY/qJ6GYbDZgS8ZGSkmZtw6vCLBh4bwDPcw8/0OEEHg8nn7L9umkP+70uTD1uUIZSQNPXzYoxYOmakiZjcMYMMZiW8C3tHejwOYHXKGEkCSTSUeUZOMs/KSQSFK7616VZwfywiJPnjz7TtdG+OdXwMqKie5NsOj+AzqkPCCFl5Js1W2hSVTNyfCUWtXFRtVxgxozZozzuhA5q8VatTnnDhUIFxHRnIrbIbsbCCFwLtBdMkSlz7PX4ysuLMwFhpuGl7aKrQivhtmaIPngBo7oHsWpR56E1+ulpaWFu+66i9dffx1rN9NL7ikfVb/7ipSSNa++xsiCgwFYEckgAcNM02b2WS3G5bNDfcbQNI1x48b1W/a0UOWBxDIdi4WiaY4bVDJDMOsGZblDGEYvUloIoaEoOxbOVBQlN6mxOylnt1kssgJDSCzLwjAMXO4AEnAJExcmyXycxV6TFxZ58uTZN5I98I+zINHBSobwK+NsAOTrvwUjeWDH9hlG2ham4qFEOkHWHs2Z0bNTJtENnf3coPqYMGECAE1KF72t3RhtCYSikFKdlI9FsgdLCYDl3Lx0EKIyFNzrMZaFPHRKx41KZly0aOuo/O5UAofXgCbIbOyl5JkUX648hhGDh2HbNi+//DJ//etfWb58Ob29+1YYLpVKsWrVKp588kluvvlmfvazn/Hwww8Ti328XIoaV71PZaoel+Im7jZpNHUMfTUp36tsNBwxXx8cx6YlC8gkEx/SW548Hw+s7SwWMm0RkyYBUgC4fIVkMo7V1OUqRoiBb1cVTWPa4Udy00035bYJIXao9O007p9uVstaXlOpFD5dJ5GNs/CToiToGbiPPB/KJ0fa5smT5+OHZcC/50DHatooZk7qaroJ8lXtRWqizTDvzzDzigM9ys8kyXgvSZ/AI5xZwWKP40qQWt3NFtqxhE1xcTFlZWW5fYqLi6mtrWXLli2sV1uoXNaBflQdVraWRbHdi6VqqNlSE90EKSwO7/UYy0NumimgjnbIqKTTXSg+nfAJgwkcUknk+c0kFrWhrIgzWxlE/fAyXmtZQGNjI//5z38ACAaD1NbWUlNTQ21tLZWVlTudtbVtm+bmZtavX8+6devYunUr9gdcKJYtW8batWs5+uijmTJlyl67ee1PVr3wGsNDBwGwtKObtNsiUtgKeLF61zPFHoNfK6BYrWLDwnmMmnX4Xh+rra2NeDxOfX39J8qtJs8nD9vaZrGw4iaqSKEISVroKIqBbacRQsHl2nk6623uUNu+x83NzRQWFu7YOPvv3OcKpSjwu9/9jueee45ly5aR1v340yn8IsXcpWs5aETtfjrTPefee+/lqquuoqenZ7/0d9111/HYY4+xePHi/dLfrsgLizz7Tk8DdKyBIZ9zvql5PhtICU9/Fza8Qgo3X09fg6+kli9PquLmV07nt/rd2G/8HmXKeeDZu+DePHtPb1sTyUDaeSx9FAQcAdFXbRsca8UHbx4nTpzIli1bWKs2M2VpG6Gj6tA9zkxeqexBygyerJtAjxKkbi9rWACUBNwsz1bfFkkN09hWi0ELeyg6cySB2TX0PrOR9Jpu6lZ7OcN9MKuqO2k2O2lpbyUajbJixQpWrFgBgKqqVFZW5sRGaWkpjY2NrF+/ng0bNpBI9J/RD/sLGOSroDJRgNZr865vHe2
pHp588kkWL17MKaecQnl5+V6f476SSSZwrxWofo14wKA54iVasCD3ejzQzObYMIa73QwOjmfNu3P3WljE43H+8pe/kE6nmThxIieddBKubIGyPHn2J9sXx1NVjfbeToK6893MuEIoWWuFrhcPGF/Rh5oN4JZym7CoqKgYsK0QwrFa9MVYKDK7r5MdClcA0p34SRIsqkP/FPzvSyn/5y6e+bvAPHuPkYRXfgm3ToUHzoCXrjvQI8rzv+Sd22HBPdgIvpn5Jh3BkdwzzcMXn7mbiJjAersSJdkF79xxoEf6maSjcR3S4xSxaxdh/IEapCWJrGpni9IJ9HeD6mPs2LGoqkq3Eqe1rQ2jPUHY7wQ2FsteLCWB13AES0T4KareuxoWALqq0K2GAdDSEiPZysKnH8dIp3JtXJV+Ss8fR8mF49CrA/jSOlM2VHBSw1jOs47kCwWHMrNiEkNLB+HzeLEsi61bt/L222/z73//m9tvv53HH3+c5cuXk0gkcGk6Qwtqme0Zz5npmXyxcyrTttRQ0xmkwizglMhkZhjD0VHZunUrd955J88//zyZzIFJRrDmhTcZ5HNSAc9vayASXolUTCorQlRUqtiq4H1lKwA1vhFsWbwMI5XaVZc75a233iKddj7bJUuW8Oc//5mOjo4P2SvP/5r29va9XpLJnbundnR0DLjPnnLEEUfwzW9+k29+85uEw2GKi4v5f//v//XL3DRkyBD+8MfbufL7/0dhURGXffcKgiR4a94Sjv/8WZSUjGPMmGP47nd/Rjy+rWBnW1sbp5xyCl6vl8GDB/PvRx4DyIkU2NEVauvWrZx99tkUFRVROKycQ487jvnzl/LA/f/h97//PStWrMDtdlMcKuaeh57ALUwm1IT5938eyfWxbNkyPve5z+H1eikuLubiiy/u5zI5Z84cTj31VG688UYqKyspLi7m8ssvdwTLTliyZAlHHnkkwWCQUCjEQQcdxPz583n11Vf5+te/Tm9vL0IIhBBcd911ADzwwANMnTqVYDBIRUUFX/nKV2hra8v1+eqrryKE4LnnnmPq1Km43W7uv/9+rr/+epYsWZLr7957793Tj3W3yVss8uwda56DZ77nBOn2MfdmKKiF6RcdsGHl+R+x6mnkcz9CAD83vsI89wz+Nc4mdeWlyFSKbxW/y+0Hn8LvQndjzb0FdfpF4Cs60KP+TNHZ2Iyu9gDQoRRQUDCYzOYIDZlWLJdNYWHhgDN7Xq+XkSNHsmLFCtaqzQxe2kF1aSX2WoEqJFawnZDhzCxGRYCiir2rYdFHTC8CA7wpg7ia4ZX7/sQ7j/6Lg078ApOOOwm3z8lI5RlWiPvyMMml7UTfbMRoiaNloLTVTSluxlCMZCgRkaTDF6fdG6PV7qEr2UOJp5Bqs4iqWIiyVAgltm1OTSv34R5SgHtwAVqRh+SyDiYsdDM4Wsbb+ho2qe289dZbLF+0lBNPOolR40bv0/nuKcbbXSiimmggzlbTxHRFUKXB+Ikvk0yuIpE4hm4TutN1FKo6Va4hbFyygBEHz9qj48Tjcd577z0ApoZHszK9mba2Nu6++25OPfXUAUVongPD9u6Le8ptt93G5ZdfPuBro0ePHlBI7k4q1w9y3333ccEFF/Duu+8yf/58Lr74YgYNGsRFF12U6/OOP/2Za678Ftf94HpakxFWrlrNcedczo+vvZrbbvsRPT0W11xzPd/85je55557AOcGfsuWLbz88su4XC6+eflldHR29rNYbE8sFuPwww+nurqaJ554gmK9gAWL5mHbktNPP45Vq1p59tln+fe//01RSQmeVEtu30xWrCQSCY4//nhmzJjBvHnzaGtr48ILL+Sb3/xmvxv0V155hcrKSl555RXWrVvHWWedxaRJk3Ln/EHOOeccJk+ezB133IGqqixevBhd15k5cyY33XQT1157LatXrwYgEHAmdzKZDDfccAMjR46kra2Nb3/728yZM4enn366X9/f+973uPH
GGxkyZAgej4drrrmGZ599lhdffBGAgoKCPf1Id5u8sMizZ3Rvgmd/AKudf+KUt5zfcB6B2GauVh9yxEaoCkaddGDHmeejo3kJ8uELEUj+bh7FP5RTeHBoFPtH14NhYKqgdXZw4upNvD+5jrE0YL95E8qxPz3QI/9MEWvvwKt0gQ3daoBBJcNIrtq1G1QfEydOZMWKFaxXW4kvbaPyyKF0EqKUXixvLwWWM4OYxIff79+ncaY9xWBAIJPCKgpRUFZOb1srb/7zb8x74mEmH38yk0/4PL5QAUIR+CaV4ZtUhrQkZmcSoyWO0Rx31i1xCroFBXEfQ+PbWVL6JmkF6JX+nJBwDS5A9ev9xuOqCRI6dhCFq7spnlfFmjVreEtdTSQZ45//eYihz9dw/PEnUDK66iOPQWhfuI5SWY2Uktcjq0gGegA4cnaKVYkkTdZsxg1byuLeo1mUbuFz1DI4MJ4178zdY2Exd+5cDMOgxA4ysaWSERTzauFqmpLt/Otf/2LGjBkcc8wxqOrO3VLy5OmjtraWP/zhDwghGDlyJMuWLeMPf/hDv5vsWYfM4IrLLkPDR4n0cs2Vv+BLp3+eSy45E4BAYCS33FLM4Ycfzh133EFDQwPPPPMM77zzDgcf7GRIu/OPtzNp6tSdppv9xz/+QXt7O/PmzaOoqAijI8mQmlrSwS2Ac4OtqirFxcWUFBeTiGYg7SS2yJhOn3//+99JJpP87W9/y/3e3XbbbZxyyin8+te/zrlKFhYWctttt6GqKqNGjeKkk07ipZde2qmwaGho4Lvf/S6jRo0CYPjw4bnXCgoKEELsMPlz/vnn5x4PGTKEW265henTpxOLxXLiA+CnP/0pxxxzTO55IBBA07SduontT/LCIs/uYaTgrVvgjd+BmcIWGk94T+WHXSeSwANMpFp0cJbyEvznApjzJNRMPdCjzrO/iTQh/3E2wojzujWen8k53FvVhvsXvwHbZu5owWOHKNxwv0XV+uW8653I2HEN2O/chTLjUghVHugz+MyQ6o0SwnF5iug+iipG0PbI+7t0g+pj2LBh+Lw+EskEm9q3MNQ1lHZRQCm9CFeUIstxsbJFYJ9vrm1/CUQhbMSIews5/6a7WTX3Nd597N90NW7hnUceYv5TjzHx6BOYevJpuVocQhXoZT70Mh9M2CYi7JSJ0ZrIiQ2zK4Ve7sM9uAB3fQjFp+9sKDmEquAdU4x3TDGFkeGMnDeJN955kyWZDayPbOXuh/7KVO9Ijjz/JFxl+yasdkX30+vw4WeraystOG4X5alWOtWlDF+qc3bv0/xpwlEUFW9li6FhmlWUeKpZuOQljEwa3bVjis6BiMVizJvn1MCYYg7BVRlAtMQ5vnsc8/QNLFM3884779DY2MiXvvSlvS6ImOezw4wZM/r9NhxyyCH87ne/w7IsR5xKycTx49FVF72mSSkJFixbybpNW/nXI4/jRFoLpJTYts3GjRtZs2YNmqYxdeq2e4vRY8ZQEArtkIShj8WLFzN58mSKihyLuVBENnhbABLbTucSNCSTSaQrsIOwWLlyJRMnTuw3iTJr1ixs22b16tU5YdHnRtpHZWUly5Yt2+l7dPXVV3PhhRdy//33c/TRR/OlL32JoUOH7vJ9XbRoEddddx2LFy+mq6srd94NDQ39ftO3f4/+1+RjLPJ8OGtfgNtnwCs/BzPFKs8kjkn9kqu6TsfWfVx+5FBmDy/lh5k5vCEOAjMJ/zgTOtcf6JHn2Z9k4vDg2YhoE2vtar5pfou7gg2EbvkV2DYvTRTcd0w5I8VXufcYx1Q//f2lLGwagmanSL78637dGYbB6tWriUajB+JsPvVkoikKcIrjJXQXSlxhc9dWTGFRECqgqqpqp/uqqsq48eMAWKu24GoK0qk4N5NuvZci6QRZe/R9v8HUQ84MWqEVxVvYRFf364yefSRzbvwjp1z9A8rqh2Km0yx46jH+fMUFvPjnP9Lb1rLT/hSPhntQiMCMSgp
PHUbp+eMInzQE75jiAUWFZZq0bljHkhee5vW/38PGRfP75cRXQy6KjxrMF374VS74wlep9JZgCIu3Uyt46f6n9spNZHdIrOnEF/NjSJPXxHqkaqKn3Uw+tpWWrYOZ3b0In53ilJWLCNU3YugGS6RjjapxDWfzkkW7faw+a0WpHWJwuJqyyyZSdsVkvMOKONgYxtGZCehobNmyhTvvvJMNGzZ8JOec57OFz+tFkRoGFm5hYtuSOV//Im+88W8WLHiPxYsXs2TJEtauXcvQoUNz37XtBYui9WWFsgb8Lnq93v4bFACBLh2hYVmxXH/JZBKXO0hfL6ZlYkuJlHKnEyjbb9d1fYfXdiZ4wMnU9P7773PSSSfx8ssvM2bMGB599NGdto/H4xx77LEEAgEeeOAB5s2bl2v/wRiwfbUk7wt5i0WendO9GZ77Iax6EoCIVsz/S36ZJ1KHoCoKX55Wy1VHD6fMo9Db0cVZkTTfaL2cx/y/YERinRPQfeGL4N83H+w8HwFGCtY+B4oG4TonNsYb3nl724ZHLobmJXTKIOdnruH3Yh2l9zpF8P47XfDWpCl8edXXkRmB4RrPiwfdzNELtsB7YByroC+5Hw6/CgrrkVLyr3/9i7Vr1wLOzM7IkSMZMWIEFRUVH4sUn5907JRNkd+p85DWdFIrtyuKN3bnblB9TJw4kffee4/NSjuR5W10qwGwISgiBLO+RYX+vc8I1UeoyBEWJVYv631+liy9kILQZIYMuZoRB89i+PSZbFq8gHce/RdNq1ew5IVnWPrScxTX1FFYWUVRVQ2FldXOUlWNN7DzuhrStuluaaZl/Rpa1q2hZf0a2jZtwNouwHLeEw/jKwgzcuZsxhx6JOVDh+cCHmumDOWiSZfx5kuv8/LcV1kQWcVB8zdTNq1+n9+HfuOUko7HVqIAr4v5pBQLYasMs5exJGVwzgbHTzotdYakm1jQOJKSsk2sbPQxxaqgPjCONW+/ybBpMz70WNFolHnv9VkrBtNU1MDCO15j2ufPoPSCcaTXdKM9vZGiNj8v6svoSsS4/2/3c8SRRzB79uz8d/UA0BesaxgG77//fr/Xxo4du8MN7vZs7y7zQVauXLnfhPI777yzw/Phw4fnZvT7jmPYNt5s7YoJ48eyauU6Ro4ch883aIc+R48ejWmazJ8/n+nTpwOwdt06eiMRpHS+3+IDrnoTJkzgz3/+M11dXRQVFeWK5Ol2GFOPoGkKtm3kRIAmJalsMT63zJA2bMaMGcN9991HPB7P3bDPnTsXRVEYMWLEPr1PI0aMYMSIEXz729/my1/+Mvfccw+nnXYaLpdrh2xOq1atoqOjg1/96lfU1jqpcOfPn79bxxmov4+KvLDI0x8zAxtfh1X/hSUPgZnERuUe+3h+HzudOF6OG1vOd48bRb1u0PP3e1j7939gdXdz17U/5UuJAs6JXs3T/p9S2r3RKZx23n/B5TvQZ5anj63z4bHLoGN1/+3uAgjXOiIjXOc87hMdyx+GVU+SlhoXp7/Nj6LrqH7pMQAemq3RWnc6x6w5HAloLgUyXszwt1lZ/0dGb1rP8rdqmPS5Bjqe+iklX/0r8+fNZ+3atQgJUjh5x5ubm3n11VcJBAK5H9shQ4bk013uJXZaUJqtuo1LJbqinQbFCczcnUDcqqoqSopL6OjsYG37JiLBABhQazk3NWmpUVWy72lYS0qrAQgTo2nN4dSPWkJvZBGLFp9LOHwwQ4dczeDJUxk8eSpbVyznnUcfYvPSRXQ0bKKjYdMO/XmDoZzIKKysJlhcQufWBlrWr6V1/VrSifgO+7j9fiqGjiBQWMyGhe+R6O1h0TP/ZdEz/6WwqoYxhx7B6NlHUFDmiN7ZRx/OyqUraI628cJzL/CVyRcgtP13g51a0YnSJdlCGxvdzngLeqsJnPA0VStcBGSS9+yR3CVP4y/qr/hSyyv8ccxJpNvjbEi2MVQrJ7JkK6ZhoO3iJhPgzTffxLRMyuwQRYU
+nnzhVgBWzX2NkYfMZtZZX6X8yikEF7Ry6nNB3kwtZ43WxCuvvELD2k2c8ZUv4fPlf9//l5SWOq5/hmHsUK+htLR0l8JiV5SU7L9JwC1btnD11VfzjW98g4ULF3Lrrbfyu9/9rl8bRSgkpElBttr2FVdexLEnnMU11/yCb3zjUvx+PytXruSFF17g1ltvZeTIkRx//PFcdNFF3H333WiaxlVXXYU3mw7btkyUDwiLL3/5y/ziF7/g1FNP5Ze//CVloWIWvj2fqroaZhw1iUGDqti0aTNr166iqKgUTdNwa9mEEcIgaVicc845/OQnP+G8887juuuuo729nSuuuIJzzz13r1NRJ5NJvvvd7/LFL36RwYMHs3XrVubNm8cZZ5wBQH19PbFYjJdeeomJEyfi8/moq6vD5XJx6623cskll7B8+XJuuOGG3TpefX09GzduZPHixdTU1BAMBnG7d89Vck/JC4s8kI7Buhdg5ZOw9nlIb8slP58x/DB9HmtkLdPqC/m/E0Yzzu6h6/YbWffY48jt0hqmfnYdf/3VTXzpnSLOSnyHJ30/xdc4Hx6+EM66H5Q9C/pb8t4itm7awvTDZ1BavvdZMPJkMVLw6i+Qb92KkDadFNBCCdWig7DshXQvtPZC6/KddvG9zMV8vXkTg951ZkzvO6oAd/BixjXWAzD2c9W4JxTR+GQDret62Tr4CkLJP1PdupyWpSHKlEdpXXUJzz37HADTzeEMtcrZqnbSoHTQqHYRi8VYuHAhCxcuRFVVBg8ezIgRIxg+fDjhcDhftGs3sQ2FEtkDAnwenY1rNmLoFsFAkOrq6g/dXwjBxEkTeemll1irNqPpPkjBMKMRgC5C1NftfarZPopLyjGlgiZsetaPZebXb2bT5jtobHyQnp53WbDwLIqLDmPIkG9TM2YCXxwzjt62VjobG+huaqK7eSvdzY10NTUS6+okGY2QjEZoWrNywONpuovSwUOoHDqCiqHDqRg2gnDFtkBsyzTZvHQRK954hfXz3qG7aStz//UAc//1AFUjxzBm9hGMmHEoJ55+Cn+57y+sNRtZ+8xiRpwyZZ/fCwBpS7qf3kCCNC/ry0CAJ1FBfeVTbOkazCWRp0hLnfvLvsMVXziWv989n3PUFzl19bs8VONhwYYwQ6wy6rxj2Lx4HkOnzdzpsaLRKAvmOTOekxnCC4v+iuo2qRo1jC1LN7P67TdY8+5cxh95LDO+eDY13zuYk1+vZv7r7zJXrGL91o3c+JsbKfAGKAwXUlxZSlFpsZPWs7CQwsLCvb7JzfPJ5mtf+xrJZJLp06ejqipXXHEFF1988bYGUqIInYyQ+KST4nj8pKE8++yD/OxntzN79myklAwdOpSzzjort9s999zDhRdeyOGHH055eTk/+9nP+NGmjYBTyVv7wDyUy+Xi+eef55prruHEE0/ENE1GDxvJLb+5CU0r4LTTPs8TT7zEF75wKj09EW666SbOO/8cADykSRoWRWEfzz33HFdeeSXTpk3D5/Nxxhln8Pvf/36v3x9VVens7ORrX/sara2tlJSUcPrpp3P99dcDMHPmTC655BLOOussOjs7+clPfsJ1113Hvffeyw9/+ENuueUWpkyZwo033sjnP//5Dz3eGWecwSOPPMKRRx5JT08P99xzD3PmzNnr8e8KIT8qB9FPMJFIhIKCAnp7ez+9QWrxTiez06onYf0rYKVzL/UoRTxtTuEpcxpz7XGMKA/y/eNGMiPaQNe99xJ79VWnOBrQWuPnn1OSTF8jOWSVRAmF2HrDLVz4agdTWM1D3l+i2RmYdhGc+FvYzZvCxk1b+fO9f0FmvR2HBWuZNf0QBk0fgeLO6+E9Zst78PjlTiFD4BHrUH5qnEsPjtuIlxRVopNa0U616KBmu3WN6KSQCDdlTmf0Wpuhy9/GFnDfsUMo5xt40gE0t4L+uQruXd1MQ1eC6fWFfMn00ba8C4nF2BX3U9E2j4rZ3fy99hI6bKiyCvn
K589EDblJLm4n+X4HZsakWemmQelgi95JVPbPue7z+aisrKSioiK3LioqyrtjDMCdX/8elwy6C4DnRs2h4/3RrFVbOPjggznhhBN2q4/e3l7+8Ic/AFBZ0MI3eh/MvbZc1lN0wdNU1X24SNkVjT1J9D+Mokz0cJPxba76+XUApFJNbNz0R5qb/4OUJgAlJUczZMi3CQZGDdhXJpWkp6WZriZHbHQ3NRLt7CBcUUXlsBGUDx1OSe0g1J1U5v4g6USCdfPeZsUbr9CwfEnud09RNYZMmUYqUMvKlg2UUcDF37kcLbDv1rX4gla6/r2ap/QFtKi9qIaPslQt+ufu5bQFyym0Y9xof4WzrvodtUU+/vPWKqY9ewqDlDb+WzCTRbEpnJA4mDI7yDz5T774m7t2eqynHvsv8xYvoMwuoLq9l82ZtxlzVhNCS1EaPpP1L6psWLAQcATZpONPZvoXvogLD+ufWMxTq1+lV0nstH+AUChEYWEhRUVFFBQUEAgEdlh2Vin9o0ZKSTKZJBqN5pZIJEI0GiUWiyGlRFXV3KIoyoCPdV1nzJgxA1d73ktSqRQbN25k8ODBeLIz8h/EMAyWLFnSb9vEiRMPuJg74ogjmDRpEjfddNOAr9u2TdvG9ejuAtJKiio6SQoXht/G56tH03buzjgQXU2NZJIJCsrK8QZ3fc9mp0zMjiRCV9DL/ZhmgkTCiQdNJsNYlkogXECox7lWblSHMLj8o0vN+nFiV/9ze3JfnL9D+6xgpqFtJTS844iJzXNhu7zPjUol/80cxPPWVBbJYUgU6ot9/GZ2Pce0Lafnx9+kYTtfzqUj3Tw81WRlbQrd9rC83ktRtJORjREG/eZHXPftX/GTN+Fb6Uv5o34LYt6fHNeaWVd+6FBt2+bxfz6CROKXbuIizbroFta9tIWaF4o5uGoCQ6aMxDO6CDWYd5PZJUYSXv4Z8u0/IpC0yTA/NC7gfc90fjssRU2hTqfmo1WU0yhH0JSUNEZSLOhN0RJJ0ZNwfM9dVobfr3iIoeuXYCjw4HFHUJs6HUWqxMtcvF0Ci97eFqz/3qZuussMLpxSTMfCTlaMmYOh+1hmKXTY4JIaR46dzsrmtyjR66k5dSzh04aRWtWFf3E7NatLkEmbHhGnQelkq6+bFrOLRCLB+vXrWb9+27FcLhfl5eX9BEdpaekBu1n5uBD3OmZuQ6qUpctYsAduUH0UFBRQX1fPpoZNmPT/rnWLIMPK9j3GojTgZp0soEz0gEtiGhaaruLxVDF61M8ZVHcxGzfdSkvL43R0vEhHx4uUlZ1Iackx+P3D8PmGoKrORdDl8VJWP4Sy+iG7dWzTjBGLrSIaW0kstpJ0upWy0uOpqDgVRdFx+3yMPfwoxh5+FLGuTlbNfY0Vb75K+6YNrJv3NiVDOtHdhbTRy7x/v8ohXz92n94LadpEXtjMYnUTLWov2JJQz2jKpt1OYI1FoR1jqT2Y+pO/R22R4370xZmj+P3qH3LVpm9zSu9brK2sYf6WDZwiD8LVWYoR6UAP7eji0tvby8LFToD3iHQhS3tfYMxZHQjNEQrtPf+k6ogxTDjhMub951UaV61g/n8fYemLzzLtlNOZcsYXuDw6jvalW+jY1ExHaye9iQgRkSAqkkREEkNYRCIRIpEImzdv3ul5ezyeHcSGz+fD6/UOuLjd7p1aLjOZDMlkkkQiQSKR6Pc4kUgQi8X6CYn95Xf++suvccYXz2DE6JH73JcVN+h5eRNGSZJUawyhmwiRDRQWwklopAgs29xhX5m2kEIFVXxsrbu2aaIKjYywCMgkCEipOi5VQVV3HgOyM/qqb1vmju/HDmRjLKTtTBJomg9dD2MYPXg8CeLxALZhkhIuPDKDasaRMvSxfS8/juQtFgPwibdYZOLQshyal0DLEmfdtgrs/hUgV1HPU8ZUnrOnsUbWoAjB1EFFfG50GUeVCgrfepnuB/6O2eJkYDF0hVfHwZPTBK2FGmMS05iZPAG
9oRArY/Ne9b857+nXqOwGz9ix3HvW97lvYRvfcD3DDxQnyJcz/gLjv7jL4b/+35d5ecHruKRG5ZYWTCHJlFTS4TGR2e92hR1mklnP4Jp6fONK8I4pRivx7rLfzxwN7yAfuxzRtQ6Ah63Z/Nw4h295upn55qNYW7fusIvw+dAKC1GLilALw1BQSNofwli+FLF8KUmXiyeO/Aql6WlEhWRRtcq7MccH3KMr/GRygiNYyHdXDOHNWAVlQTdXVJURebcDl7uXpvASpIDJiSDrNr+SO66qaVSNHMOg8ZMYNGEyJRV1pFd0k1jSTnp9D0gwsegSMTqVKF16nE49TqcZwZID3xhomobL5eq/6C50XcelaOiqjq5olJaUMvnQqZ+6/Py//s61fD9wM80Usb7gUl7sTRDw+rn6u9fskYVn8eLFPPbYYwTDnVzT87fc9ieUQ/j8tc/ul7G+ce0sZivLucX1bcSWGSj1KUZPreWQg8fjzVoB4vH1bNh4M21tT31gb4HHU43fPxS/zxEafv8w/P6h6LozgyylJJ1uJhpdQSy2MickksmGAcfj8VQzaNAlVFWegaLs6IfcumEd//n5j0nFopRPOYZ1yW580s3lF12Cv2bvZ62jbzSy+ukFPOVaiBQQ7KqlpKSXxPB5XLr6aQypckP17Vx/0Vn9bnRShsW/f/M1zjWepI0CbvV+lTN6ZqGk4vTWvsLsq+/Y4VhP3P8wC9cvo9wuQN2wiLKj1hEc1IquFzFkyLfZsOH3GEY3iuJiyOBrMDvHM/efD9C+2XE78RWEOfi0s5hw9PG5OA4rliHTECWzJUp6cy+Rrd1EjBgRkSQiEiREmoTIkNJNkmqGhJXC2kXWnJ0hhMDj8eSEhmmaOQFh7s7N5QfwerwEXD78igef5cKT1vAkFYQNNjK72M5abPcYGwtJuxKhU4kigMNGH8LhXzwGRd1zK6q0JfH3Wmh+bhXveNYx7LBxVFdXo2kaQoKCgkCgZBdpSTa29f8fHlM2DK3P7VhTEKpAaEp2EaBmHyv770ZZ2hJp2CAlwqVy5OeO3KXFIp1MEGvvIaYJamUripD0+Nz4/dXo+p5bB6KdHcR7uvEVhAmV7No9U5o2RkschMBV7YgY284Qi68FaZNKBbBtLz4tRSjTTYcMESirx6N/uq4PA7G/LBZ5YTEAnyhhkex2hEPz0qyQWAoda4EdP9aoCLDUqudlaxLP2VPZKssIejQOH1HKsYP8TOvdjFg4j/g7b5NZt21GOBJQeHoKvDhJIWAOY2bieApbhtJgSjboFhs0m7SQTEtrqAX/4ZL/vkkoCf4jjuDaqefx6rpOfuH7B1+xnwTVBV99BGqng5GATMJZZx93tXdx+5MLMLEZEfHRkpxP4YiZZNYuJhKJIkvrSYaCOYFRYgeZZNYzyC7FXV9AyXljUbyf7ZlqMgl4+QbkO3cgkLTIQn6QuYDqTJivLH8Gkb0x6PVBdwBCSUEoIdE+ZOKuM1TK3OkX4TKrmOc1me+1yNgSkPxw+Fa+ZD5CYfO7ANgoPKUfw3XRU0m5ivhWbQnRLS8SU1JUpwNEN7yOxKZu/CS6mxuJdrT3O5YnEKRu7AQGTZhM7ZBxaE2C9NpujLYEVvc2tz0bm16RoENE6VRidKrOOiP7i+gPY/qwSZz41VP3aJ+PO7/+wQ/4vvt2limDWaGdycpMhmnTpnHSSXtWvDKdTvPb3/wW1dfCD6L357b/XTuGc/7ff/bLWJ/4yQl8XrzFn/RzyWw5PbddYmEXd1M7rYzDZk2muLSAaGwVjY1/JxZbTTy+HtPs2Wm/ul6Ex1NFMtmAaUYGbON2VxAIjCYYGI2iuNnaeD+ZTEfutUF1F1NVdVbOKtLHxsULeORX1zm/tKNnEZMpphaM4uRvn71X70FqXQ9b71nII9q7JEQaVyxGOHkUvmNu46Slyyi1e7mTM/jSd+6gOLCj2Nnc2k7q9iMYKbbynD6NxtSpHGtM5L2W2/j8TX9AC29LL9z
d1smtt9+GjWRMp4tM1bNUHtyIQMXUvks0PpgjDxlOw+af0tn5KgDh8MGMHvVrNi/awFsPPUBPa7PzHnu8FNfUUlxdS1F1LcU1dRRX1xIqK0OgYLQmyDREHMGxOYLZsc29USLJYJLy2xhlGplCQTpgk1QzJFMpkslkvyWVSmEYH/7dVhQFn8+3zerh8uDVPXgUF17bhTet4YkJ3L0Cdy+ocuciQHg0VL+G4tNR/DqKT3PWfh3Vp6P4NYykwdPPPs0q0ym6NtxdwxfOPI3A0N236KU39dLzxHpWt2xgrr4KPehh1qxZOWExEJZl0dra2m9bZUUFutBREagoqFJx1llRkqNPcOhKvzXKzi0dUkrIigiZsbENC8swMU0LS1hIQEdFd7lQPRrCo6HoO763yWiEeG+SlGJQQzsZNDIhF37/iL2yDMR7e4h2tOPxBwhX7LpWkrQlRpNTE0avCuQEVjrdSjrdhpQK8XgYr0+nMNFIUrpIhYdT6P/0e0fkhcVHyMdWWCR7siJiMTQtcpbuTQM2baeIpdYglst6Vtj1vC/r2SpLAMHgEj/HDCvkaNqo2/g+qXffJblsGWxnEpYC1lcKnp8kWDVoEKOiMynvOoiNpspG3aJBszEH+P4XW4Lh2sv84L9P4TLBf+ZZXFx4JKtbI/wtcDuzzbd2enoSuNf+JpsVnTIriDfyPnr5YJSlkphvAerUYyhOx9mweAHJgmKMcClkZ17Dtp+DzMGMGjaKkjlj9+tszCcGIwVb3sF64krUnk0A/Ms4jJc6Z3Px5rn4Njo+o1GP4NkZo+ksnoKtKBhqioyaRMoUqpVGN1O4jCSedApvOo0vlcQla7D957BG03nTaxITEhWLb5Uv5Rwep6TX6TsjNJYERzIt4rjNxfBxk3Ea91vHcpC2lUlKL651i/FkAlQXruCEO95CVRW6m5vYvGwRm5cuZsv7S8kk+/tth8srqR03gZKaOsIllYRcJXgsH1ZnCrM9idGWcG5YLIlEksbEwMQUFgYWhrAw6XtsOs81SVJkWCmdm4HTj/kCE2ZN/h99WB89N117BVcpf+M1fQLvWkeSsBXOO+88Bg8evMd9Pfyv/7Bm3VtcmX4An3CE3d3uU7n4B/ftl7H+8+df5mzjaf6tHkHVswm6Q5PoKJlI3N+/1obpbiM80cusw6cwpL4aoQgMo4t4fD3xxDoSiQ3E4+tIxNeTSjf121cIDb9/WE5EBAKjCQZHo+uFWKZNT1eM7kiE2ppC2tr/w+aGu0mnHWuty1XKoLqLqK7+Mqq6LQPSW//+O2//50FcxfV0lpWgSoWLTz2P8sk7psrcFZmmGE13LeRZuZAWpQclnaSoqYKiaRsJGWs5ufct1tjVbD3zOT43rnan/Tz23J846a3vowuLn3nncGrPCbR2r6D2oPcZcdFfcu0eue0fLO1YQ6kZQI8/y9BTNgM2zYuPpneNEygrZRwxrIPZJ6Xoar8dy0qgqgFGjriW0pLP8/6rL/L2ww8S7+4acCya7qKwuobiakd0FNfUUVhVTdBfhN2cIb05QmZzhMzWKFgfuA3RBFrYg3ApCJfq3PS6VBSXiqVJMsIkpRiOKJEGqhS4TRWXoeHOKKgJiUxaWHEDmTIHmmfrh+LT0LLFFrVSH3qZF63MhxpyIYXEzGQ+sKS3PTbS2JZNRf0wFj43n1fWvIMUkhI7yMnDj6Dm8+PQCnaefceKZOh9ZiPdixp5W1/DOtX5nxtUV8fkSZOoq61B1zRs284uEiltbCmxLIvGlv7Cory8fKfWV5ETGyJn+RAIlO2fKwqKpqJqzvuOIrANCzNjYJomprSwsHMLA11qJagojshQdNweF5rXhXCrCCGIdXYRSaXwiDhFxOhR/fgKS3C5inb9Qe2EVCxKT2sLusdDcfXOvx/giCOjMSssKv2IrGVJSptYfA3SNkinvQi9iJLEZqSEFt9wKgv33EXrk0ZeWHyEfCyERarXERFNi7cJia6BixI1yHKW2YN
4367nfTmY9+16OnDMiTWFXsZUBJkYkIwxOqnr2Iy+ZCGJ+fORyf6Bsc1FgqWDYHm9TmtxHUWJMYQjM2k0PWzQLbrU/v8qlQUejhhZxjH1KqlYL99/roeo5Zi16+31/PKZ+wgbCTxXXMVZPUPpjUb5T8EtjE8vzPVhCY0kbmK2i/etmbyhjUCRghH2JooPewtFNTHiRXS8/3nEogBSew3joHHUDZrM1gVzaYzEyRSWgurM6MwyRjLt0BmET9jzm6ePNVJCvB16t0Dv1uzSCL1bsHqcbWpi26x/syzitpbPc9TmLVRucrLjxD1uXpk6g7j/cNrsYjboNhJwyeyCyD521u7s2oUgISSvewzaNImHNBcH32KO8iRFaWfWMq54eaDqFBomXcghdSNZsvwFTln4aybEnDoVG+1yfm5+FUNUc1BbEE/IpnLwg3iGbyBQUIXHXY7bXY7LXY5LLyXRZdKxsYumFZtpXNGAbe7oLiEUhYLScsKVVRRWVBEur6IwWEFADePGi+JSwaUg3AroAnQF4RLgUpCKBCmxLZsX73mGJbF16GhcdMGFlNVWfPSf50eNlNz90zlcLB/jce9MFiWn43P7+M73v7tXge7r1q3jH/+4l/PshxiEk272Vv85XPHd2/fLcP9207f5Ws9fedc1hltHfBVvIsWozeuoWN1FQXsZPUUT6A0NBrHd2KWNJI6txpHuDIrfxlXoIlQRIlxRSEmJj6JgAq8nSiIdpKPbR1trhN6mbmKtcYweGxI6wvQjpB8h+m4wUsiiTgbNLGfUqC20t99HKuVkwtL1IupqL6Cm5qtoWgDbtnj0V9ezcclC9OEz6dYyDNWr+Or/XYRQd29yw+xK0Xj7Ap7JzKNF6QHLIti0kWDh8RgHPcnlq5/BkoJbhtzBt8/78i77klLy4M1n85WeZ+mVPu4SP+T0+HC2tn+fmb/+N67yEbSvbOSOf/4ZW0gG9TZRdexraG6T3k0H0/zeBWAbaGYMs+8mT5pQupDRx7yGtJwJhNLSYxk18meoSgE9LU10bm2gc+sWOhu30LW1ga7mxn61QT6IP1xIqKyccFkFBaUVFLrLcdsxMsYGEtZqUj7Hqqoli9BTJejJYrRUMXqqGD1ZgmL6+s++7wbCo6H4NbRCD0qxG9tvk3aliRMhnugm1tNFvKuTWE838e4u4j3dZJJJbGv3XKtUTWPEjEMpGXkQL7/7DikrjUfqHGNPYvjh4wgcVuP8JvV9VqZN7K0mIi810Gi085q+grhIIxBU+V3YDRuZ9KVzqKooR9+JULClpKmnvyWusrDASbkqFKRwhAFC2e3EKdsG2KcZBFLs4jZRZv/Y2YlJRR3wWIoUjtDQXJiWQRyDCtmBLiy6PQHChUNz38E9JZNK0tW4FVXXKa2r//D2TTGwJVq5D2U7FyfD6CGZ3IJEkEoWEba7cEuDJqWSqopPwXXhQ8gLi4+QAy4snv9/8NatA77UYJeyTA5mmT2EZXIwy+3B9BLApSmMLAtwUNBmgtnF4HgbRR2NyE0byKxdh9Xbu0NfvX5YOkiwsq6QrYXjsazhaKk6kmaALgXaVBtju98HRcDBdQWcXtPLbO8GQt1LkFvewx9xgvJ6hhzPTxu/yCO9zoye205x+aLHOHrLfOxrf86XVvpIGhbHDRI0xW1WdliY2fwBg5JtHF+wlbRiMdodofCgp1jLSOJmIfWeFRTTRbq3kvZlpyNWBfEaL2KPgPIjL8LYsoGF768gGQiDhKOMcRx05mH4Jn3yU9TKTW8Sf+J7eHvWotqZD22fkG6e65gCa72MalgFQG+gjLkTj2CzfzprNJV1ujWgtenDCBPlQvdLnKs9R4Hl/D916GH+XvtF7GkXcvaQoVS4dJrWrOKdhx/kva2NVI8t5eyef1JmOBWg37DG8VDRyZw89l50bXd9oQWqKMDOeDGiPuIdCr1bDeLtCpmIC2nvm3XK5wkjBk+lhR6K9BDfuOZy3J6PJr/3R45lQPdmrJb
l/Ofh+zhLvszfg0exNjqBgyZP4ZQvfHhawoGwbZvf/fpGjsr8iynSEYt/LLmcy7/5i/0y7H/ffwtfWv/j3POM0FgRGMqC4BiW+4eRNnT8jVEqNysUR0tJBEdgK/s3841iGyiWgalvV5NB2lh6I2XTllFR/w6W5QhpTSugrvbr1NVdRDqe5oH/u4pYIkN00HCkgLOmn8LoEw/60GNacYPGOxbwVORtWpVehG3j3bwav1lJ4ORWjlnzLlWyk/vVUzj9e/fi342MeB0d79B8+yWMtzfyqpyIL/0jmhtfYvrUBWi103hxUSWr1W6KTMHQ8f/BU5og1VXHqjf/j1UVHQSrN+ImTVOskrItpVT3FqNbgLApHPkgZeNfRwgbl6uE0aN+SUnJ53YYg21bTkrgrVvoatySEx49LU2kEzFcQQNfWQpfaRJfaQpvSQrVtfuxFsJyo5nF6EYpulGCkC5sxcAWWbukyGDJ7RYrgy0NbNvETFlk4hIrrWKmVay0gpVWnecpFSujYqVUbFPwwel4VdNQdReay4XmcmfXLsxMhq7GLbl24cHDiISr6E0lUKRgpjmSscEhFJwwGO+EEtJre+j573pS7THma+tZrjn76tJG37waNRnHW1jMtHMvoq6uDk82WF0oAiGU3NqybVav7z/hOGxQHUJKLNPAMk1s03RcmARZsdFfaEiRPc++oPC+xx9ESoSUTrFU6SzCtnOZ0vp/QAKpKE4BVkXNHrN/E0tIamUbFgpmcRVu996nrjYNg46GTQghKBs89EPdqYyWONK00Uq9/bJMSilJJNZjWUkMw41maRSYvbTLAkqqBn/qA7iTySSbNm3KC4uPggMtLF792w0cseFGtsoSltmOiFgqh7DcrqeHINV+jYO9SSYQYUiqi7KeVtyNm8ls2IA9gIAAsIGGYp33K0pYXj2aJv8wkrKclB2iR4h+AmJ7hrgTfKW2nVnejZTGlxFqW4LL3DG9oIWCio2hulngOZtr2j5Ho+pc+Md3rOeb7z+O9zs/4Lz56dzvkK4KZg8Oc/am12iKxNlUpBH2Jegtb6W2Nc1ZiZfxk2a1WsvrxZOJ1poM9y+Hjkral56OaCggFHkBd9Va9CPmsGThShKeAIoUHG9PZtI3jsRVs2dp6z42GCniz16Hd8GdYGYvgoZChxGi0wjRbQTpNXzEDC8J041t6ajCRUkyxqhNy5AIWovH8Nz441jprWW1bpFSQMFmsljL5/3LmelahSZsDHQy2SUtdVLopG2NlNRJWBoJSyMkI5ws3sAnnbolDZ4K/j3kq5RPP4/ZtkHn6hU0rl5B06r3iWVdI8YUzqSxNMAaf5Spoff5QttzuDGxpOAh/Ui21ldRsnYUQipo3m40by96dq15OtG9PajeKELZ+Q2HlArYxYh0IZmIh1irpHtziniHwDZ2I9hOCJCSkL+Krtp6kiLDyJIhnH35uR/fi4iUEG2BzrXQuQ7ZsY5062roXIcrugVFWkgJL4ipHMt8/lR0Ao1dozj33HMZOnToXh/2uaeeRVn0G44xFwBw34gbOO8r39ovp3T/m2vZ+MzNzFRXMEVdR5Hc8XesUytgUWg0i4KjaZchjLSOkdGxky60lIYvJfCkNLyGC93yoNk+VALOjY00sWUUQ42T1tIkXBkSPouk10LxpdHdBrrLRCgQi/lxt4QojRYSNEpR+q6QwiJU+yrF457HHXD+x4PBcUwYfwfdW6P889rvo1dPpMMvKCbEpd/7Jppv537Zdsai6e6FPNH2Jm1KL8Ky8Daswe9xEagdQcD/Hmf0vs4mu5yur73ClGG7n9b3uZe+weFvPIwHg19yPod3TqAy+SNk+CQe8NdgC5txtS9TOLiJtlQt72w8mwn6e5ze9jyFZjTXz4LgGJ4vnMn6+FgK2odQ327jLWigavpduMOO5SocPhhNCzo3vDgz5UKouedCbNuWSm6hN7J0wLgYy1BItntItHtItHuRlkAPGLgCBq6gs9YDBrrvf1M
9WKCh68W4XRV4PFV4fFV4PVW43ZV4PJW43ZW4XMW5WfaWdWtY/PzTrHrrNSzDQAqFTO0wMn7nHmK0Wc0McwSuIh9WV4oOEeU19/t04yS/0LvbcLduRVUURh5yKBOOPYmoDWVlZRQXDxyrsTvpZqWU2JaVFRmO2LBME2lb2JaNbVvYlpV7vu0N6BMcZIWEEzSv6jqqpqPqWnadfa5pCCEwjQxGOu24i6UzGJk00raRioqi6qCo2AJ0kaSECL3CR6himPN/spdI26Z1oxMXWlY/ZIcieTu8b20JZMZCK/agePtPUGyfftYwwhSnO0hIF0rZqE99AHdnZydtbW2MGDFiB3e6vLDYRw60sLjvpcXc/Pz7+DIwXYsyzu6lPtVFSU8rnpZGrOYmZ9YgiyUUut0BOj0FdHhDbCgK01BURru/lF49TFwJkBJuDHbmd2lTTTeTPZ1M9HUyzNdBpWihJLGe4viOWVMiqp8FoTEsDI2lpWwyWs1UMr1bOW3hrzi0x0lf2KVXcH/3V7lVnYwpBJpt8sWGtznkq6fyZtLLYSNKOSS2mdW/+jVv+Ifz6tDRZITNl3iVr6nP48/6cdtSoGxnhl2uDuaN8DjSdSlqopKeZV9AtgUp6XwZT+E7LCo7lIy/AF2qnKxPZ9xVR6Luh9zy/0tk8xIi/7iAYM9aWlYF6VwR/NDA6j4Mzcu8wcfw3OAZrPK4iSiSAmIcrizlWNdiDlOWELKjH97RTljuH8Zzg7+CTx9KyYpFtKxeuUM8hKppHDL5DHq6Vd4vfoO6uvcpLGokniykaJWLQ6NO/EUvfv5S8kXa5CAMK4BpBcjYBUgrgGaoeAyJx7DwiRgBtZuAqx1vsBl3qBlXsAVXqBlVTw80TABMw4VpFyPcdQQCwwgH6ykpHEJBcBAeTzWq6sK2LZ6742ZWvP4ypUUT2FjmRgrJ0VMO49DP7zgTe0BpXEDv09fja3kP3UoO2ETaEEt52BQrJVPq4SCxlj+WnUysexzf+b/v7VPmq9bWVl7/60V8Kf0aAC8c9XeOmX3yXve3PevbY3z57ndoi6YBSY3oYLJYyyRlPZOVtYwVm3CLnVu4EoqHbi1Ijx6iRwvSowfp0UL0agFSwoXXTlNoRigyep3FdNZhMzZgf11aiCXBkSz3D6PZrCASGYSaqKe6G/wZi2DNPMqnPIjmjqNpRUyccAeb3mvjxXv/THr4JAxhc/TgmRx63sDpZ6UlafrbEh7f+Mp2omI146ZOp3lJJ4kjtnDFuv8C8JeRt3HBl8/do/czFlvNUw9ezVmNbxKXbv5g/paDl71D+4SJrNFaGFy2AmVYEy2No5nduoIZqW1FMbfKEpplEQeJtf1+e5f6h/NCeBbrUzMIt5YzqfpfFI14EbErN5mdIC2FTHclie6hpLqHkOiqJ5Kqoser0OMzSHhjCGkg0ja+dApvKok3FceTiuO2orh8vbj8UfRADJcvhlABqYLUQGpIWwOpI6UG0oVEz601zULTM2iuBKorgeKKI9xJVHcS4UqjutMIdfesJ0LouN3leDzVlJYcRVXVmRhJyfJXXmDJC0/T09ZKpriCTGk1CEGFHeZzmXGs0ZpZoK1HAsI08DRvIuzSmHj0CYw/6jj8YSezWHNzMz09PZSVleHz+XaY7MgYBmtWr+63beTIkXtdx0JKiW1bSMvOihADaUk0VUVRVYSi7NoBTVVRXK5cX0hnbVkmlmFkY1IMLCNDQEvgFiY9rkLC4V0HXO8OnY1bsC2LcEUVumvX13yjO4VMW6ghF6pvx/cqHm9EyjiG5aIwnUBKiASGUOAfuJ7IJx3HUpOgra2NcDhMZeWOn0deWOwjB1pYbPzt70j+7V6kYdHr9tPpCdHlKciuQ3R6QrT5C2n1h+lxh4ir3qw5c2dICohTKnqooZtRoo0RnnYGudqopIXSTBP6Ltxs1vjqWBAay6biiWSqp1FePY5JBQHGB7z4s/mjpZS80NHLa2/+jctW3UJ12vH3X2l
P4lfJr/Ka6gRhVqZ6+PqRI1j29jIWxhQaA6UUEeEi7al+gmKVPozNE76FUjWDxvmPMqT5WQ6RS9HFtjvsRWIYbxaMJeorpXD1dJQeP+H2x1g/soCMP4hH6pxWfBgjLpvlZLv4uGOZRF++Ee/c35JqUVizqJhO3zRWjTycTlWnRypEcZFRvWQUlYyATNba1Pc4ISQ9qs0IsZXPKYv4nLqYKcoaNLZdKLu1IK8XTWdt4UEYqh/FzqDaGccVxDbQbAPVzqDZBpp0nivSYp0yhPSSRoo7WvoN2+X1UjViNNWjxlI9agyFooxVT/+DhsH/oiA7qymESnnZKQyqv4S1S1fjee6HjGbnee2Twk1E8RFV/URUP1HVT68WoFsNE1HDxCkiYReSwYutCFQ9TdjdSrFnE2X+dfi97TvtG0DaAjPuwUh6SWeKEL0z2fTmPMpqZrM+mERBcO4Z5zB4/LB9+ED3E10b6HnyWsIb/osRV0j16GSSGl3JIN1JH8mUBimJN2XiS2ZQpcQSKsEz0wwSbdxc+QUGFZ3KqV86/cOP9SH89Tdnc37iGQC2fGMRtZW7Vy9id5BS0tybYnVrlLWtUVa3xFjbFmVtawzLSDFabM4JjXFiE8UiQgHxfje/e4MtBT346ZZBMugMEU0DipgWrYglwVGscQ+hxRxG3KhhVv1fKCpYi7QVBg+9ltVPtbFxTSvtRT480sUVl1yGvzK8w3m2/mcFDy97jnYlgmLZeBtWc8QXz0ItLWXx4gf5XOuL1MtWHtGO5eT/ewjXXvx+LV12DZmn3mBqajXv2qN4N/VDFrk2oXhjzJaL+bzxFkXCEVaWFLxkT+Ef1udYHzwYVJVUVzPHqfM4QXmPGcoK1O3e55Weel4KH8omORGvNFCzyRwULBRsVGznsbCy221ULKJWIW3pOgyp4XZ14dM6KVDaKbLaqMi0UZ1qpTrTRlF28iMtNLrVED2qIxq79BC9WoioGiQhQiRkgLQdxNqDklwWOqbtxrZcmNKDtNzYlhvbdKNaGi5T4rIzeJQ4fq2HgNaJ392Ox9eF5u1C93Wj+brQPJEdRJVlaCSiE6gc+wPGDR5H68olLH7uKdauXUeyajCoKkKSy2qoRboZHPIy7fiTGTp1Bqqm0R3PsLaxlU0L/4qMLqSwdCbh8oNQFG2HuBLbhq5Iku39jIoCWQHiJOwDtvdwkrlaGBLFKWUl5baSVn0D2841SrJd99nzFds9dp70P5Dcficp+j2WgK2YFMgIEoEMVqKo+57JMdbdhW2a+ArCaB8iLOy4gZ2xULwaimfHY9u2STrdihCgpUDDIqoVEgx8Qj0gdpNwOExFRcWA1vq8sNhHDrSwuOxHv+LdeD09Lj/WTgItBTYhEhSJKEVEKKWXatFLheilTOmlROmlWO2liB7CsgfXh6TfNITKZk8Vm7zVbPFVEwnVYxYOxVt3EKNLqhgfcOPPpEnFoqTiMVKxWHYdJR2LobpcjDviaBR/gHs2bMZ64/dc2PAgbmlgovJi5kR+mjqGJte2Qk05QaE9jx9HUKz0D4Yj/4/RB53Vz8/TsiXL1mxg7Vv/pG7rM0y1lucudLYUzBcjec1zENGuI6lubqWjuhHD4yYgPZxRO5XBFx69rx/LR4rsXE/XA+cTalrK2iVhOrvH8cLkU5lfEGaIuogiEUHDRsNEy16kNWGhYaFio+NcwP2kOERdQY3o6Nf/Sv9g3gpMYk2ynGRjkqrmBvSdBCUKBIrQUIWaW6tCQxM6qqITCBRSWjOE4vJawiUVeAN+UkYDSWs9CbmRHvNd0sFNznlJleqqL1Jffwleb13uGFs7o/zz7l9yePIFCokRFAmCJHMZh/aGXuGnXS2kXQvTqYWJqEESqp+k5iaj60i3RHoyKJ44qiuFpqXwKXGK6cSXMmidX0rb+yECQ4+gyRXFLzxccsVlBIsOUAKHeAe9z/0C/9L7sHph06oQkaYymgoG0+0N0+sJ0eMOEXX5ibh8RHUPMd1
NTHMRU1VeD1yIV2S4pfILnPC5HzN8+PB9HtLtv5/DZZFHSUkd17WtH+pysD+wbcnW7iRrWqM50bGuPUZv0iCezCDTUQIySpg4BSJOmBhhEaMg+zxIgig+umWQLoJ0ywBdMkg3QbpkENsdxu9xEfTo6JqgqaOXqsxGJiobGC82MFHZwHCxFU30n8W2EbwYOJh55eMZXfUiBSJKtHkYXYsG0+mtJa4aTA6P5AtX9Q+4bnt2Df9+68mcqAi1bWLWWXNoXPUm8VVNiPo2vhJ5iSZZROyCNxmxl5XNk8ktPPvymRwzfwMBmeIh8wjqRBuHqCtybZpkMS8HTiAy8iyGDxvJxNoCyoLOrOyWrgRvre9g7rpOVqxbz0GpdzhReZeZyvv9JniiwouJiiWyEkI48sIUWXkhFExUbKESsmJUWh39Jjo+ThioJISHpOImqXiIKV6iqo+Y6ieueEkoPpKKn5TwkRFeDOHCVDRsXaIVtlBWsJoasRXVliTX1RFfEUaoEKgfRXdSY2M0g6W7wbao8ekMGVKF7NpK94YVpDoEmt+NZ0gM79DNKIFeeigkgwubACiFWGhYUkPaAilVhBRIW8GWCkIKhLAQqo0QFopiogkDXRjotoVqKKgZDcXQwXSDYiJVC0u1sTUbS5XYisRSwVIULKmBLbClAgjkdipDyqx46HstdxeZjdtwWm+HzG51ri/uTS7OjL/GQjGc7sCPOeS0YfhC++ZZ8NydN9O0eiWzvvw1Rkyfucu2kZcbSCxqwz+9kuDsgb9fr712PS73m6RXDuKI+GIe1Y9h+pk/pjToxqV9+lyidF3fpUU7Lyz2kQMtLH74s9tIJBopFhEKRZQSYhQrUYpElLCIEiZKAdE9/nHu0kK0u4rY5K2i2V9LLFSPWVAHvkpcSojCZIxQdwd6Zyvx7k5i3V2kYjHS8RjpRPxD+3f7/Ew/9UtMOeHzdEnB3UveZfo7P+PYrred44siHm05hNeMMZxa/B7H+97N+ewvDwxl1bBRnHLCbbjdH55ybkvDRpa8+gDVm59isrUytz0ifTysHM7C1GEUiTaECwptP6foLdR8fgaucaflMkh9LJCSyNw/ob/wYzrWaWxcP4rHx53JmmIXJ+gv82X1ZQrFwO4auyIlXLxdMImV6jhi8SEMadepSEk0xYWuuHG7/bg0DyoqAsUpvCSVbNrBgcWsxMbwdpAObCUT2Eo6sJV0sJGMrwWU/r5alqXS0T6aY4/9HSUlA8/6x9ImL6xooSdhkMhYxNImqVQKMxlFpnohHUFJR1AyMTQjgm5ECBhdFNNLqeilJLuU0oNL7J3fdRQvjWoprxRPQh/cRll7J01v1ZEom05ESVHjLePr3/nG/7Z4XiZB9LVb0N++GTozrFtVwDJjJv8deSwdBRmqlRaCJAmIJAES+EXKeZ7bliQokkxWnMKIt5WdySUX37FfKpH/7YEf87V1t9BIMdXXDZyl7n+NlJK0aRNLm8RSJrG0STS7jqWd/y2fSyXo1gl6NEJeZx306ATcGuoHUlNLKWmLplnbGmNdmyNiGlo60NqXMyi1mvHKRiaK9QxVmnP7LNWH8krNeKqq38fXotLw3nhai2pQpOCi08+jcmI9AB1zN/HP5x6mQ4miWpJgyxrKMnFUz1DSB0Vpd5tcteExVCH517ibOPOLX9+n92bV6utYPO89zl43N7fNQuFd/zjc0y9l0qwzUXfj/0JKyZrWGHPXdbB4zUYCm1/gSPsdDlOW7tJFbWcYUqVFFtFEMU3SWVpFCQlPFUawCkLVCEVDJroQqW7UVA9apge30UPAjhEWcQqJEhZxCkQMnYG//x+8uRGACwMvaXwijY80XtL9hNK+0CP8rNAHs8FTSUcwgF2YJmi48C0ejL7WRlHX0FtiUxBJIZI1mKKWdLWH2OgWkoUtkBF4YhYlyRg16XZqzDa8Mo1LGrgx0LMTS7uLiUIKF0ncpITLeSzcGEJDlyYuaeLCwCUNXBi42fZc3Ud
L4O5wh3o+duMpeAI6R31tNPUTdqwQv7s8e/tNvP/aixx69tc4+LQzd9k28uJmIi824D+4gsLTBp5wWb16Ges3fIX25sGc1/AaW2UJT1gziUofaVcQ21OAGgjjChYSCBdRWFxGWUkZdaVF1AY8SJw6I7ZpYJuZbYtlYJsG0jSQlkFZyI2uadmA+mxQfe7xBxZVB+/eF+DcF/LCYh850MLiyTsv5+SWB3arbVT10aUX0OYqos1VRMRdTMJbQtpdRMZdiKWGsIQPTB1XKoGntxtvayNWRyvRzk7MzLYZYk3o6IoHl+LGpXrQRNZXMvtXKia6X+AKqLj8CppfoPkkmscm0t3BxqVdGHGdYEkph579NUbPOpzFsRSPvfEg5y3/HUOSTspGEyX347g0MJy5g4YxsqKbQw/+Oy7Xnv+wdDZv4J3nbmFswzPU246bji0FL9mTeUI9nLQVYKJU+HL6UVzFbbhnXoI+4QzwhMEVyNXC+F8jI820PnAR/pXv8N77I3h00Dn0VqQ4S3uO45V5uRnSLa5y3vcNy80IWsKZ/bPRABVb6ICWXXQycgjFsfGMjGgIaWOIFIanm4y3A9PTienpwvR0YXt6kIqZnUeS2ZSC262RILLPFQvF24lQB7Z82ZabTKqETLKEtm43HR11nHbaeYwZM2a/vme2LYmkDLriGboTGbriBt2xNLFIJ0ZvC3a0FRlvQ0l0oBu9eI1e/FaUAhklLGK52ewQiR1caFJS53n3NFZUlFPT0cmm+EGYAqYUlvP5S88Dl28no9pPWCaJefdjvfRzlJYeFq6r4vnA6SwcNIiD3Qs4VX2TCcrGPepyg7eaxfVzOP2s7+2XIa5b/xjuv3+HhQWj+MKVT+6XPj9J9CQyrGuLsa4txsZVixm87j5OFa/hEc73ooliniicgau6C3P1GJqMELWiiPN/fAUd767mn888RacSRbMgtHE96WEjsYuaGB5rYFZmOeU42dOedR3Osf/3OMo+1uNJZzp4/Y3DMNfUMCLSyLLSWtw1KY487B94PFUf3sFOMC2bZY29zFuzhXjHFqRtg20hpeXUQ5IW2CZIJygY20ZIE+nyo4Tr8BVVURryUR7yUBZyUxZ0U+DVPzRhQp+IjKQMIkmTSMogmjIxtws6/uAdjfzA/hnLJmXYJA2LtGGRzFhkMinsdBwrHUdmEshMHDJxFCOOMGKomRi6GcdlxXHbCbx2gqBI4s+K+TBx6kXLgAIlI1XWiRpWa7VsUupoywwhoHZQoTRSY7VQbzczSLbm/of2BFsKp7gcOhYKLgw8ZD5SUWBJgY2CnbVi9D22c8lpHfuG4jg6oWSvLwoy2xIUIWmkhN4znmLZExE6tjiTZ+OPrGHm6UPR9iJI+o0H7+O9x/7NpONO5qjzL9ll29jcRnr+uwHv+BKKzxk98HlaFn/56zcoq1rMKfPWo+6moDOkY6nTMPf/51A9FS56af/2uZt8ooTF7bffzm9/+1uam5sZO3YsN910E7Nnz95p+9dee42rr76a999/n6qqKr73ve9xySX9/4kefvhhfvzjH7N+/XqGDh3Kz3/+c0477bTdHtOBFhZP/PcPFKz/LxG9gLgeIq4FSasBUoofQ/FhSjeWdCFNBT1t4ElE0bs60Du7IZZElRq64saluNE/uGgauluieSSa20J1mwiPgdANpB7H1OOYegxLj2HpcWwtidSS2FoS1N2Ymeopo3uDh7YNKsHQGA7/6gXUjp3Aww1NbH7pD1y69X58doolgRE8WXMwI0rfoQSNQw59DI973/JES9ti7su/RV3wBIck389tX2NXc788lkZrHGc3PMRRdfNQXdv+7Q3Vh6n5sVwBpB5AuoPgDiLcARRPAYrLi1BUhKpvt6gI1YWi6ghVA0XPpdZzyF4g+1L6Qc44bEtI9LTC879g3qoyHgmci7+qhXO1ZxmvbMqN693gOBaX1lNRsRq/GseydAxDxTI1LEvDsnQsS8O0dOzsc9vWcOlpdHc
MtzuOx53A5R440HdvsG2FRDxMPBF21tnHmbRv2zkDkyZN4tRTT91vx91XDMsmmjKJJJ0bkUgiRTLSSTrWSc+a15nY9B/GsW0GfqWs4ynvwTTIKsIphS9YKxgyNkzhxJNBc7MtJeMHUzRul6pR0UDzOO01b3bt2bZW9VxWqvSKZ4g/9SOUhq082ngIr5YeTVnJVk5R32KWss3tz0RhpX8IMcVHQvWSFG4SioeUcJMRbjK4MXBj4cJXsZ5QsJUJlT9j3PjD98v7aJpRFi36GiWlRzO4/vL90ucnmUTG5JUFK+maezfHRx+nVDjZrOLSzePuQ1nsHo474mN2Cla6fGwWBoYuGSI2crC5gqlyVT9rW1y6meuewdjz76L6QyoI7y5r193IpoY7AVBQmDL5bxQWztgvfX9WsW1J0rBIZBxhEk0bNHX0EtmynGTTEvzdK6lJrmWEvYkCsWMWxYFIS43NooJGrYZO7yCM8BA8xcPQfSFUlxtN96C5POhuD7rLi+7x4NZduHUNj66gqwoZ0yZtWDy/rIF7Xl2JMFMUukzOnFjCsCKVTCqBkYljGWk03Y2e6kJb/xLuZDsuDNS6g9EPOgc9WIrm9uByedFcbhRFxULywKoH+NuKv2FLm+pAFdfOuJaxJWNyP3ki+3sobJvu++6l8867EJaFXl1N9a9/iXfcuGzohkAToKgKlmHz9mPrWfKSk3K3uDrAsReOpajSv0efyaJn/8vL99zF8Okz+fw1P9xl28SiNroeWo17aAGlF03YabvHH38MW/6StFlAMFGAXwTRMxn0ZC+udBRvJorPjBG04wRkfLesSRmpYjqyw3ERRKBgoyBxqwK3iiPGpL3jUjMVLnxxj96X/cUnRlg89NBDnHvuudx+++3MmjWLu+66iz//+c+sWLGCurq6Hdpv3LiRcePGcdFFF/GNb3yDuXPnctlll/Hggw9yxhlnAPD2228ze/ZsbrjhBk477TQeffRRrr32Wt58800OPvjg3RrXgRYWb911H0ZjE6qwQXV8JlElKDZCsZCKBartuJ8oJqg2UjOw9RSWmkRqKWw1jVTToKVBTYOaQWgZhLLvJl/T1DFNHct0YVouTFNH1zIEQ+390l/LhI/ezUF0ZRwzTvwuekkdNzz8BqaxjmFlzzPWvwQ7HeTQI57E663Z53H1YdsZliy5nea5z3BY52ICWXerXunjn9aRvNk7mSGJNfhI4RImbgw8GLiFY272Cue5V2TwCQNNWNtmXYREkX0zMX1zNtsWK+t/akrh+BkDtlQwUbCl469qAV1mgOdcJzOibA1fVl+iVDhFjpLCxSulB9FRY1MZ2JA97r5jWSqZTADDCGIaIUwzhGWHELhQFBVFURCKmnu8ba1tW4tyVLUCVXWhquqAi6IouFwuhgwZsl9cb/5XWLbk3XdeJP7Wn5gdfTk3exiTHh6Xs3hZTGdW8zLOLX0aXd8//uE2AlO4sNDY1ObjvsjnSVQHOEZ/l2OUBf1mMBcGRrK8eBBqeRsVnq17dJxDZy3F7d6zi3SePaetq4c3n/oT4zb8jRHSyaZnScHzcipvKJMZZW/kcJYwSGnrt99GUcmK8MH4x57M1ENOJODfv5+VaUZ5c+7hWFYvI4b/mNraOfu1/zw7xzQtmhvWsXbV60Q3v0m4ZyPVRjtdapAObxlGuJ7iuoOpH3IQZTXDcbn2X22WTR1xrvznIpZsdcTuV2fU8f9OGrNjytRMAl75Obz9R0BCoBxO+j2MHjjj28LWhfzfG/9Hc7wZTWhcPvlyvj7266jKjlaGxMJFNH3nOxhNTaBplH37Koq+/nXEAF4Cm5d38tJ9K0hGDTRd4dAzhzPm0KrdTvu99t23eOL3v6By+Ei+8rPf7bJtcnUXnfe8j17pp/zKKTttt27dOh57/CYmTXomd2/jcpVRWnIUpaXHUFg4A0XJ1jySEjJxIpEunmnt5J/tUValbAxFw1I0jigt5KK6Kg4pDOTOybBsnl7WzJ/
f2Miyxm0pto8eXcYFhw5hxpCi/ucv5Z4XOtxPfGKExcEHH8yUKVO44447cttGjx7Nqaeeyi9/+csd2n//+9/niSeeYOXKbT71l1xyCUuWLOHttx0//rPOOotIJMIzzzyTa3P88cdTWFjIgw8+uFvjOtDC4sl/fA1vxdwPb7iXSCkwTRem6cIwXJim23luuDGyjy3TjS29jnjICgnDcGbMd6h0k0XXkxQWNVFcvIXCwiZUdZuIkYYGycEMGXs2K1fchSfYhpn2MX3aPyksGfuRnKdpxlix5m7WznuJqVvXMshoBZyL/Uo5iDQ6JiqGVLFQMdh+JkHBRMuZNfu9fx84zvZhan2CQ8maflXhiA81azDue81HmkOU93Ozlc1aMW/WjEFUdlCotwMKBaGJFBUfRnHRbHS9EMtKYFpxLCuOZcazz2NYVgLLjGOacVLpXlQ1hNdTjddbjc9Xg89Xk823/jGty/AxIx7p4P+3d+9RTV35HsC/J09IgADhEaIIFEXq0FbF0cEqynTqY7zVVWtrO07VPrzLdtnxOVY79tK6qlXv3PruWGc5aLtmxHastauL6YitYq2WWoytg7W2CAQFRHwloiSQ7PsHGI1EBAMksd/PWlly9tk5/sKPwPnl7LP3F5+sRkrJP3GPo9LVfsiZgn81DoQNyqap3ZvLSQmehl80t+P60ICmNvebGq/Nl5IiL8MYeaHb/TQl6m44bLgH9pg6xAab27vOMAAJOl0WBqRvbNczyUtCYP++7ZAfWI0M23ctdtugwGF1b9R064/0Yc+he0K/Tg/Jav0eV+vNiI4awd8DPuR02mGxHoVWcw+Uys4fK29vdOL/8n/AOwVNV2NTYkOw9qn+6G3wMLtRxSFg54tAbdPq6kh7DBi9AtC2HJ5ssVuw+OBi/Lvs3wCAgYaBWDpkKWK1sS36OiwWVL36P7D+u6mv9sEHYVz2JhTRLRfEq7tkw2ebj6Hi+6Yhgcn9ojH896kI0t6+4Ko8cRxbX52HsOgYTFv3t1b72iusqFl/BPJwNeIWDLxlP4fDgT//+c9QqU8iI6MR9fXfwOG4/jtaLg+BXj8M0VG/QVRUFhSK699XIQS+uHAZGypq8Pn561O83xcSjP+Oj8a4mHComgssIQS+Lj2Pv35Ris+On3H9PUnrFoZpQ+/Bb++Lg1Lu25ktA6KwsNvt0Gg0+OCDD9yGKc2cORNHjhxBQUFBi+dkZmaiX79+WL16tattx44deOKJJ3DlyhUolUr06NEDs2fPxuzZs119Vq5ciVWrVqG8/NbTW97I14XF7n/Ngx2fQggZnE65a9YH4ZRDCE8PBQA1IAVBkoIhk4Ihk2kgk2mgUGghl2uhUGihVIZBqQyBSqWDWh0ElUrleiiVSrdtRfNiN54IIVo8Ll++jIqKCpSXl8NcVo7a82cQHl4Nvb4CkfpTUN80HKehQQ2DlI2+IyZ2+vfTZq/FjyXr8O1/vsY9FZeQYfnP7Z/URUzBvVCcYEBE1ElolaGIic6CPioTkREPQqnU+Tq8nzXhdOLAl6tg//pjDLF+d8sbRDtSjTwCB2PuhcXgQExIKRSSgEbTC+Hh/aEL64swXV8EB/Vo/tBKcj2a3qsynjT6mUOHN6B233Yk1VXjZIgBjQndkP7LJ2E0jnAtrEZ0p65evYq//c39JPrZZ59FcHCwW9u+E2cx5/1vUXvZBrVChvmjUvELY8tzG8lhQ/dvV8NY/FdIwoGGID3M/eejPqTlCBIhBAos3yDn7MewCTtCZBr8PnoMYhQeJl8RAhEHjiHun/sha2hEY2gwqsYPQUN4y6tzQgCWswm4UNUTgAxy5VVEGk9Armj9HpRGWz1OHf0aWtO1TQAAEPtJREFUkCTE9rqv1Q/2g4QGgxqGwYFGHFUUtXrcMlk9amWNiHDKESPkUEVUQhVVBnV0OeTq68PchFMG+wUj7LUJaKxzLxqrg3TYE9MHX+t7okHWdCU/zH4FmWe/xz2XayDd8HHlxQY
VjlqiceJyJBpF0+8IrdyOtLBa9A+9hJdfnNdqvJ0lIAqLyspKdOvWDV9++SUGD74+NdjSpUuxZcsW/HDToi8AkJKSgqlTp+KVV66Pnztw4AAefPBBVFZWIi4uDiqVCps3b8bvfvc7V59//OMfeOaZZ2CzeZ7K0mazue2zWCyIj4/3WWHR2NgIp9PZNDxFklz/BpIrV67AbDaj/KdSlP14EpcdxxGhPwW9vgIKhR31P07AYzP/p4tjKkPBpy+hVn0VzisaNN+j3LSyrrNpWm6peVsSonlb4Mb7r6Sbrle4ZUWI5mnAJdfU3beaztspAQ6tDZFyGRJ6jkGP+NHQansFXJ5/DhoaLuGTj1+C49I5RFuuf1rlypQQrW+j6efIzU3bFrUG541yhGprEKZORly3IYiOHoiw0PugUIR02Guhrme1HsPZ2s8QGzMaWq0frItCd42zZ88iJibGra2mpgbRHq4G1F62Ye7736LgROtr/ADAfdJJ/K/yHaTKKm7bt0yhwMsxehxTq2/bt1utwKyPHEi4fQiwhPZAcZ9ncTW45WvxRIhG2C6uaVNfpUyN8Qmz2tT3tOw8/qUyefofERJ6Dnp9BaL0FdBoL3no486KEHyOEdiF0bgo3Wb2S7sD8oo6KMx1kOxNw29lKoEfsn/rk6sX7SksfD4I+uYTKSFEqydXnvrf3N7eY7755pt4/fXX2xxzZwuksem3otFokJqaitTUVABNV6jM35fiP3u/xlWbDf/1wlO3OUJnxJSIUY9+jM/eXY8LdSfcF/u54QvJ7eumMY0SpOZPGGXNg1yaij0JMkjStYfUdG+C1DQftEyhhkKmhFypglyuglKlhkKlgkIZDJVajbiUftDqbj+1LvmWUqnDo4+9i33vr0YNCl3t1xbHau2TGQkSINx/ZppXCmn+2Wn6OkgdhYcznkR03C9YXN5lQkP7IDS0Y2dHI2qvqBA1cqb+EpsPlOGDolNocNz6XrGruB+zxCo8ad+OzIYvW50RSXICS6qBXJ3AQU3rvw8RBrz9lAyjvxRILRO36WxGj5I3UWMYh7qQlNu9PACAUPRBo+P296A5BPCj5Shig26/RowWAj2kCFyUeZgE5WJ3XLjYHRdKMqAKvghdVDnCosxQKOs9HksJYCQ+x0MowCHFL7FPmQmrdItF9xQAkgDRQ8BeDdSXAxG6qz4fEtUWPjuDjYqKglwuR3W1+yq+NTU1iI1tOU4PAAwGg8f+CoUCer2+1T63OiYALFy4EHPmzHFtX7tiQR1HpVKh5wO90fOB3j6NQ5Ik/GbKDJ/GQIEp84mZvg6BiOiOyWQSnh2ShGeHJLXxGW1fWHZRewJ5sT2d22NSpxw1rROOOa6d/YUQqG/wz4Ulb+az0kelUiE9PR35+flu7fn5+W5Do26UkZHRov+uXbswYMAAKJXKVvvc6pgAoFarERYW5vYgIiIiIvI1SZIQrAqMFb99OuZmzpw5ePrppzFgwABkZGRg48aNMJvNrnUpFi5ciNOnT+Pdd98F0DQD1Lp16zBnzhxMmzYNBw8exKZNm9xme5o5cyYyMzOxfPlyjBs3Djt37sTu3buxf/9+n7xGIiIiIqKfA58WFhMnTsS5c+ewePFiVFVVIS0tDXl5eUhISAAAVFVVwWw2u/onJSUhLy8Ps2fPxvr162E0GrFmzRrXGhYAMHjwYOTm5mLRokV49dVXkZycjG3btrV5DQsiIiIiImo/n6+87Y98Pd0sERERUWvaMysUkTfac17s/7eXExERERGR32NhQUREREREXmNhQUREREREXmNhQUREREREXmNhQUREREREXmNhQUREREREXmNhQUREREREXvPpAnn+6trSHhaLxceREBEREbVktVo9tqnVah9EQ3eza+fDbVn6joWFB9ferPHx8T6OhIiIiKhtkpOTfR0C3cWsVit0Ol2rfbjytgdOpxOVlZUIDQ2FJEmt9rVYLIiPj0dFRQVX6Q4QzFngYc4CE/MWeJi
zwMOcBaZAypsQAlarFUajETJZ63dR8IqFBzKZDN27d2/Xc8LCwvz+B4PcMWeBhzkLTMxb4GHOAg9zFpgCJW+3u1JxDW/eJiIiIiIir7GwICIiIiIir7Gw8JJarUZ2djZnYQggzFngYc4CE/MWeJizwMOcBaa7NW+8eZuIiIiIiLzGKxZEREREROQ1FhZEREREROQ1FhZEREREROQ1FhZ3KDExEZIkuT0WLFjg1sdsNuORRx6BVqtFVFQU/vCHP8But/soYrqRzWZD3759IUkSjhw54raPefMvY8eORY8ePRAUFIS4uDg8/fTTqKysdOvDnPmXsrIyPPfcc0hKSkJwcDCSk5ORnZ3dIifMm39ZsmQJBg8eDI1Gg/DwcI99mDP/8/bbbyMpKQlBQUFIT0/HF1984euQqNm+ffvwyCOPwGg0QpIkfPTRR277hRB47bXXYDQaERwcjOHDh6O4uNg3wXYQFhZeWLx4MaqqqlyPRYsWufY5HA6MGTMGdXV12L9/P3Jzc7F9+3bMnTvXhxHTNfPnz4fRaGzRzrz5n6ysLLz//vv44YcfsH37dpSUlGDChAmu/cyZ/zl+/DicTifeeecdFBcXY+XKldiwYQNeeeUVVx/mzf/Y7XY8/vjjeOGFFzzuZ878z7Zt2zBr1iz86U9/gslkwtChQzF69GiYzWZfh0YA6urq8MADD2DdunUe969YsQJvvfUW1q1bh0OHDsFgMODhhx+G1Wrt4kg7kKA7kpCQIFauXHnL/Xl5eUImk4nTp0+72rZu3SrUarW4dOlSF0RIt5KXlydSU1NFcXGxACBMJpPbPubNv+3cuVNIkiTsdrsQgjkLFCtWrBBJSUmubebNf+Xk5AidTteinTnzPwMHDhTTp093a0tNTRULFizwUUR0KwDEjh07XNtOp1MYDAaxbNkyV1t9fb3Q6XRiw4YNPoiwY/CKhReWL18OvV6Pvn37YsmSJW6Xgw8ePIi0tDS3T8VHjhwJm82GoqIiX4RLAM6cOYNp06bhvffeg0ajabGfefNv58+fx9///ncMHjwYSqUSAHMWKC5duoTIyEjXNvMWeJgz/2K321FUVIQRI0a4tY8YMQIHDhzwUVTUVqWlpaiurnbLn1qtxrBhwwI6fyws7tDMmTORm5uLPXv2YMaMGVi1ahVefPFF1/7q6mrExsa6PSciIgIqlQrV1dVdHS6haSzj1KlTMX36dAwYMMBjH+bNP7388svQarXQ6/Uwm83YuXOnax9z5v9KSkqwdu1aTJ8+3dXGvAUe5sy/1NbWwuFwtMhJbGws8xEAruXobssfC4sbvPbaay1uyL758c033wAAZs+ejWHDhuH+++/H888/jw0bNmDTpk04d+6c63iSJLX4P4QQHtvpzrU1b2vXroXFYsHChQtbPR7z1vna814DgD/+8Y8wmUzYtWsX5HI5Jk+eDHHD2p7MWddob94AoLKyEqNGjcLjjz+O559/3m0f89b57iRnrWHO/M/N33vmI7DcbflT+DoAfzJjxgw8+eSTrfZJTEz02P6rX/0KAPDTTz9Br9fDYDCgsLDQrc+FCxfQ0NDQojol77Q1b2+88Qa++uorqNVqt30DBgzApEmTsGXLFuati7T3vRYVFYWoqCikpKTg3nvvRXx8PL766itkZGQwZ12ovXmrrKxEVlYWMjIysHHjRrd+zFvX8Obv2s2YM/8SFRUFuVze4tPtmpoa5iMAGAwGAE1XLuLi4lztgZ4/FhY3uHbycidMJhMAuH44MjIysGTJElRVVbnadu3aBbVajfT09I4JmAC0PW9r1qzBG2+84dqurKzEyJEjsW3bNgwaNAgA89ZVvHmvXbtSYbPZADBnXak9eTt9+jSysrKQnp6OnJwcyGTuF8iZt67hzXvtZsyZf1GpVEhPT0d+fj4effRRV3t+fj7GjRvnw8ioLZKSkmAwGJCfn49+/foBaLpvpqCgAMuXL/dxdF7w1V3jgezAgQPirbfeEiaTSZw
8eVJs27ZNGI1GMXbsWFefxsZGkZaWJh566CFx+PBhsXv3btG9e3cxY8YMH0ZONyotLW0xKxTz5l8KCwvF2rVrhclkEmVlZeLzzz8XQ4YMEcnJyaK+vl4IwZz5o9OnT4uePXuKX//61+LUqVOiqqrK9biGefM/5eXlwmQyiddff12EhIQIk8kkTCaTsFqtQgjmzB/l5uYKpVIpNm3aJI4dOyZmzZoltFqtKCsr83VoJISwWq2u9xEA17ljeXm5EEKIZcuWCZ1OJz788ENx9OhR8dRTT4m4uDhhsVh8HPmdY2FxB4qKisSgQYOETqcTQUFBonfv3iI7O1vU1dW59SsvLxdjxowRwcHBIjIyUsyYMcN1MkS+56mwEIJ58yffffedyMrKEpGRkUKtVovExEQxffp0cerUKbd+zJl/ycnJEQA8Pm7EvPmXKVOmeMzZnj17XH2YM/+zfv16kZCQIFQqlejfv78oKCjwdUjUbM+ePR7fU1OmTBFCNE05m52dLQwGg1Cr1SIzM1McPXrUt0F7SRLihjsgiYiIiIiI7gBnhSIiIiIiIq+xsCAiIiIiIq+xsCAiIiIiIq+xsCAiIiIiIq+xsCAiIiIiIq+xsCAiIiIiIq+xsCAiIiIiIq+xsCAiIiIiIq+xsCAioi63d+9eSJKEixcv+joUIiLqIFx5m4iIOt3w4cPRt29frFq1CgBgt9tx/vx5xMbGQpIk3wZHREQdQuHrAIiI6OdHpVLBYDD4OgwiIupAHApFRESdaurUqSgoKMDq1ashSRIkScLmzZvdhkJt3rwZ4eHh+OSTT9C7d29oNBpMmDABdXV12LJlCxITExEREYGXXnoJDofDdWy73Y758+ejW7du0Gq1GDRoEPbu3eubF0pE9DPHKxZERNSpVq9ejRMnTiAtLQ2LFy8GABQXF7fod+XKFaxZswa5ubmwWq0YP348xo8fj/DwcOTl5eHkyZN47LHHMGTIEEycOBEA8Mwzz6CsrAy5ubkwGo3YsWMHRo0ahaNHj6JXr15d+jqJiH7uWFgQEVGn0ul0UKlU0Gg0ruFPx48fb9GvoaEBf/nLX5CcnAwAmDBhAt577z2cOXMGISEh6NOnD7KysrBnzx5MnDgRJSUl2Lp1K06dOgWj0QgAmDdvHj799FPk5ORg6dKlXfciiYiIhQUREfkHjUbjKioAIDY2FomJiQgJCXFrq6mpAQAcPnwYQgikpKS4Hcdms0Gv13dN0ERE5MLCgoiI/IJSqXTbliTJY5vT6QQAOJ1OyOVyFBUVQS6Xu/W7sRghIqKuwcKCiIg6nUqlcrvpuiP069cPDocDNTU1GDp0aIcem4iI2o+zQhERUadLTExEYWEhysrKUFtb67rq4I2UlBRMmjQJkydPxocffojS0lIcOnQIy5cvR15eXgdETURE7cHCgoiIOt28efMgl8vRp08fREdHw2w2d8hxc3JyMHnyZMydOxe9e/fG2LFjUVhYiPj4+A45PhERtR1X3iYiIiIiIq/xigUREREREXmNhQUREREREXmNhQUREREREXmNhQUREREREXmNhQUREREREXmNhQUREREREXmNhQUREREREXmNhQUREREREXmNhQUREREREXmNhQUREREREXmNhQUREREREXmNhQUREREREXnt/wGboGimIt7FuAAAAABJRU5ErkJggg==", - "text/plain": [ - "
" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], + "outputs": [], "source": [ "#| eval: false\n", "plot_attention(nf.models[0], plot=\"all\")" @@ -1912,18 +1285,7 @@ "cell_type": "code", "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAxYAAAGGCAYAAADmRxfNAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8fJSN1AAAACXBIWXMAAA9hAAAPYQGoP6dpAACT5UlEQVR4nOzdd3hT5dsH8O/J7qZ0F0pbdssQaBEBEVA2IjhReUFE8IegMpyoyHDgQERFRBREHIiKAwWVMkUpChUQ2bultHTR3SZtct4/0pw2NG2TNGka+v1cVy7ak5OTJz2lyX2e+74fQRRFEURERERERPUgc/UAiIiIiIjI/TGwICIiIiKiemNgQURERERE9cbAgoiIiIiI6o2BBRERERER1RsDCyIiIiIiqjcGFkREREREVG8MLIiIiIiIqN4YWBARERERUb0xsCAit/Xuu+9CEAR07tzZ4v1Hjx7F/Pnzcf78+Wr3ffnll1i6dKlzB2jFOCZOnIioqKgGGUdDOn/+PARBwJo1a+x6vCAIePTRR+vcb8+ePZg/fz5yc3OtPvZ7772Htm3bQqVSQRAEmx5rq/nz50MQBGRlZTntOaqKiorCxIkTG+S57HHgwAGMGTMG4eHh8PT0RMeOHbFw4UIUFxe7emhE5AAMLIjIba1evRoAcOTIEfz111/V7j969CgWLFjQKAKLmsYxd+5cfP/99w0yjoYUFhaGxMREjBw50qnPs2fPHixYsMDq4ODgwYN4/PHHMXDgQGzfvh2JiYnw8fFx6hgb0vfff4+5c+e6ehgWHT16FH369MH58+exdOlS/Pzzz7j33nuxcOFC3Hfffa4eHhE5gMLVAyAissf+/ftx6NAhjBw5Eps2bcKqVavQq1cvVw/LZm3atHH1EJxCrVbjhhtucPUwqjly5AgAYMqUKbj++usdcszi4mJ4eno65Fj2KikpgYeHB7p37+7ScdTmyy+/RGlpKTZs2CD93t98881IS0vDypUrceXKFfj7+7t4lERUH5yxICK3tGrVKgDAa6+9hj59+uCrr74yS6dYs2YN7r77bgDAwIEDIQiClJozYMAAbNq0CRcuXJC2C4IgPVan0+Hll19Gx44doVarERQUhAcffBCZmZlmY4iKisKtt96KX3/9FT169ICHhwc6duwozaTUNQ7AcipUaWkp5syZg+joaKhUKrRo0QLTp0+vdlXemuevSc+ePavNJnTp0gWCIGDfvn3Stu+++w6CIODw4cPStlOnTuH+++9HcHAw1Go1YmJi8P7775sdq6ZUqB9//BFdu3aFWq1G69at8c4770jpQpZ89tlniImJgaenJ6677jr8/PPP0n3z58/HU089BQCIjo6WfrY7d+60eKwBAwbg//7v/wAAvXr1giAIZmlDq1evxnXXXQeNRoPmzZvj9ttvx7Fjx8yOMXHiRHh7e+Pw4cMYMmQIfHx8cMstt1h8vqouX76M++67D35+fggJCcGkSZOQl5dnto+t5/27775D9+7dodFosGDBAum+qq9pwIABZr/jVW9Vz81///2H0aNHw9/fHxqNBt26dcOnn35q9rw7d+6EIAhYt24dnn/+eYSHh8PX1xeDBg3CiRMn6vwZKJVKAICfn5/Z9mbNmkEmk0GlUtV5DCJq5EQiIjdTXFws+vn5iT179hRFURQ//vhjEYC4Zs0aaZ+MjAzx1VdfFQGI77//
vpiYmCgmJiaKGRkZ4pEjR8S+ffuKoaGh0vbExERRFEVRr9eLw4YNE728vMQFCxaICQkJ4scffyy2aNFCjI2NFYuLi6XniIyMFFu2bCnGxsaKa9euFX/77Tfx7rvvFgGIu3btqnMcoiiKDzzwgBgZGSkd02AwiEOHDhUVCoU4d+5cccuWLeLixYtFLy8vsXv37mJpaalNz1+TZ599VvT29hZ1Op0oiqKYnp4uAhA9PDzEV155RdrvkUceEUNCQqTvjxw5Ivr5+YldunQR165dK27ZskV84oknRJlMJs6fP1/a79y5cyIA8ZNPPpG2/fLLL6JMJhMHDBggfv/99+I333wj9urVS4yKihKvfjsCIEZFRYnXX3+9+PXXX4ubN28WBwwYICoUCvHMmTOiKIpiSkqK+Nhjj4kAxO+++0762ebl5Vl8zUeOHBFfeOEFaVyJiYni6dOnRVEUpXN03333iZs2bRLXrl0rtm7dWvTz8xNPnjwpHeOBBx4QlUqlGBUVJS5atEjctm2b+Ntvv9X4c543b54IQOzQoYP44osvigkJCeKSJUtEtVotPvjgg9J+tp73sLAwsXXr1uLq1avFHTt2iH///bd03wMPPGD2mqv+jicmJoqDBg0S5XK5uGfPHlEURfH48eOij4+P2KZNG3Ht2rXipk2bxPvuu08EIL7++uvSsXbs2CGdl3HjxombNm0S161bJ7Zq1Ups166dWF5eXuPPQRSNvxPNmjUT77rrLvHMmTNifn6++NNPP4l+fn7iY489Vutjicg9MLAgIrezdu1aEYC4YsUKURRFsaCgQPT29hb79etntt8333wjAhB37NhR7RgjR440+0Bvsm7dOhGAuGHDBrPt+/btEwGIy5cvl7ZFRkaKGo1GvHDhgrStpKREbN68ufi///3PqnFcHVj8+uuvIgDxjTfeMNtv/fr1IgBx5cqVNj+/JVu3bhUBiL///rsoiqL4+eefiz4+PuK0adPEgQMHSvu1a9dOvP/++6Xvhw4dKrZs2bLah/dHH31U1Gg0Yk5OjiiKlgOLnj17ihEREaJWq5W2FRQUiAEBARYDi5CQEDE/P1/alp6eLspkMnHRokXStjfffFMEIJ47d67W12vyySefiADEffv2SduuXLkienh4iCNGjDDbNzk5WVSr1Wav/4EHHhABiKtXr7bq+UyBxdXnc9q0aaJGoxENBoMoirafd7lcLp44caLa810dWFzN9POqerx7771XVKvVYnJystm+w4cPFz09PcXc3FxRFCsDi6t/Tl9//bUIQArOa3Ps2DGxY8eOIgDp9vjjj0s/ByJyb0yFIiK3s2rVKnh4eODee+8FAHh7e+Puu+/G7t27cerUqXod++eff0azZs0watQolJeXS7du3bohNDS0WppNt27d0KpVK+l7jUaD9u3b48KFC3Y9//bt2wGgWmefu+++G15eXti2bZtDnr9v377QaDTYunUrACAhIQEDBgzAsGHDsGfPHhQXFyMlJQWnTp3CoEGDABhTdbZt24bbb78dnp6eZj+fESNGoLS0FHv37rX4fEVFRdi/fz/GjBljlvLi7e2NUaNGWXzMwIEDzQqrQ0JCEBwcbPfPtiaJiYkoKSmp9jOPiIjAzTffXO1nDgB33nmnTc9x2223mX3ftWtXlJaWIiMjA4Dt571r165o3769TWNYt24dnn76abzwwguYMmWKtH379u245ZZbEBERYbb/xIkTUVxcjMTExDpfC4A6z8v58+cxatQoBAQE4Ntvv8WuXbvwxhtvYM2aNZg8ebJNr4WIGicGFkTkVk6fPo3ff/8dI0eOhCiKyM3NRW5uLu666y4AsKq+oDaXL19Gbm4uVCoVlEql2S09Pb1a29CAgIBqx1Cr1SgpKbHr+bOzs6FQKBAUFGS2XRAEhIaGIjs72yHPr9Fo0LdvXymw2LZtGwYPHowBAwZAr9dj9+7dSEhIAAApsMjOzkZ5eTnee++9aj+bESNGAECNbVWv
XLkCURQREhJS7T5L2+rz2mxl+pmGhYVVuy88PLzaz9zT0xO+vr42PcfVr0WtVgOA9FpsPe+WxlqbHTt2YOLEiZgwYQJeeukls/uys7NrfO2m+215LTV59tlnkZ+fj99++w133nknbrrpJjz11FNYunQpVq9ejV27dtn0moio8WFXKCJyK6tXr4Yoivj222/x7bffVrv/008/xcsvvwy5XG7X8QMDAxEQEIBff/3V4v3Obk0aEBCA8vJyZGZmmn3IFEUR6enp6Nmzp8Oe65ZbbsGLL76Iv//+GxcvXsTgwYPh4+ODnj17IiEhAZcuXUL79u2lK9n+/v6Qy+UYP348pk+fbvGY0dHRFrf7+/tDEARcvny52n3p6ekOe032MH1QTktLq3bfpUuXEBgYaLatpkLz+o7BlvNuyxj+/fdfjBkzBv3798dHH31k8blreu0Aqr1+ex08eBCxsbHw8vIy2256bf/99x/69+/vkOciItfgjAURuQ29Xo9PP/0Ubdq0wY4dO6rdnnjiCaSlpeGXX34BUPuV1JqufN96663Izs6GXq9HfHx8tVuHDh1sHre1V3QBSB2GPv/8c7PtGzZsQFFRkVUdiKw1aNAglJeXY+7cuWjZsiU6duwobd+6dSu2b98uzVYAxiv1AwcOxIEDB9C1a1eLPx9LswwA4OXlhfj4ePzwww/Q6XTS9sLCQrNOT7ay5Wdbk969e8PDw6Paz/zixYtSmpCzOeu8JycnY/jw4WjdujU2bNggdWa6+rm3b98uBRIma9euhaenp8PaBoeHh+PIkSMoLCw0225KtWrZsqVDnoeIXIczFkTkNn755RdcunQJr7/+OgYMGFDt/s6dO2PZsmVYtWoVbr31VmlF7pUrV8LHxwcajQbR0dEICAhAly5d8N133+GDDz5AXFwcZDIZ4uPjce+99+KLL77AiBEjMGPGDFx//fVQKpW4ePEiduzYgdGjR+P222+3ady1jeNqgwcPxtChQ/HMM88gPz8fffv2xb///ot58+ahe/fuGD9+vO0/uBrExcXB398fW7ZswYMPPihtHzRokJQuUzWwAIB33nkHN954I/r164dHHnkEUVFRKCgowOnTp/HTTz9JtQKWLFy4ECNHjsTQoUMxY8YM6PV6vPnmm/D29kZOTo5dr6FLly7SuB544AEolUp06NDBppmlZs2aYe7cuXjuuecwYcIE3HfffcjOzsaCBQug0Wgwb948u8ZmC2ed9+HDhyM3NxfLli2T1vAwadOmDYKCgjBv3jz8/PPPGDhwIF588UU0b94cX3zxBTZt2oQ33nijWntYe82cORNjxozB4MGDMWvWLAQGBmLv3r1YtGgRYmNjMXz4cIc8DxG5kEtLx4mIbDBmzBhRpVJJrVotuffee0WFQiGmp6eLoiiKS5cuFaOjo0W5XG7WpSgnJ0e86667xGbNmomCIJh1JSorKxMXL14sXnfddaJGoxG9vb3Fjh07iv/73//EU6dOSftFRkaKI0eOrDaG/v37i/379zfbVtM4ru4KJYrGzk7PPPOMGBkZKSqVSjEsLEx85JFHxCtXrpjtZ8vz1+T2228XAYhffPGFtE2n04leXl6iTCar9pyiaOz4NGnSJLFFixaiUqkUg4KCxD59+ogvv/yy2T64qiuUKIri999/L3bp0kVUqVRiq1atxNdee018/PHHRX9/f7P9AIjTp0+v9tyWuh7NmTNHDA8PF2UyWY3dt0wsdYUy+fjjj8WuXbuKKpVK9PPzE0ePHi0eOXLEbJ8HHnhA9PLyqvH4VzN1hcrMzLQ4jqrdrOp73k33Vf35oEr3patvVc/N4cOHxVGjRol+fn6iSqUSr7vuumrnztQV6ptvvjHbXtO5tmT79u3ikCFDxNDQUNHDw0Ns3769+MQTT4hZWVl1PpaIGj9BFEWxgWMZIiIiAEBZWRm6deuGFi1aYMuWLa4eDhER1QNToYiIqME89NBDGDx4MMLCwpCeno4VK1bg2LFj
eOedd1w9NCIiqicGFkRE1GAKCgrw5JNPIjMzE0qlEj169MDmzZur1XIQEZH7YSoUERERERHVG9vNEhERERFRvTGwICIiIiKiemNgQURERERE9cbibQsMBgMuXboEHx8fCILg6uEQEREREbmEKIooKChAeHg4ZLLa5yQYWFhw6dIlREREuHoYRERERESNQkpKClq2bFnrPgwsLPDx8QFg/AH6+vq6eDRERERERK6Rn5+PiIgI6fNxbRhYWGBKf/L19WVgQURERERNnjXlAS4v3l6+fDmio6Oh0WgQFxeH3bt317hvWloa7r//fnTo0AEymQwzZ860uF9ubi6mT5+OsLAwaDQaxMTEYPPmzU56BURERERE5NIZi/Xr12PmzJlYvnw5+vbtiw8//BDDhw/H0aNH0apVq2r7a7VaBAUF4fnnn8fbb79t8Zg6nQ6DBw9GcHAwvv32W7Rs2RIpKSlWTd8QERERuYOioiK8+eabZtueeuopeHl5uWhERC5eebtXr17o0aMHPvjgA2lbTEwMxowZg0WLFtX62AEDBqBbt25YunSp2fYVK1bgzTffxPHjx6FUKu0aV35+Pvz8/JCXl8dUKCIiImp0MjMzERwcbLYtIyMDQUFBLhoRXats+VzsshkLnU6HpKQkPPvss2bbhwwZgj179th93I0bN6J3796YPn06fvzxRwQFBeH+++/HM888A7lcbvExWq0WWq1W+j4/P9+q59Lr9SgrK7N7rER1USqVNf7eEhERETUmLgsssrKyoNfrERISYrY9JCQE6enpdh/37Nmz2L59O8aNG4fNmzfj1KlTmD59OsrLy/Hiiy9afMyiRYuwYMECq59DFEWkp6cjNzfX7nESWatZs2YIDQ3lmipERETUqLm8K9TVH5ZEUazXByiDwYDg4GCsXLkScrkccXFxuHTpEt58880aA4s5c+Zg9uzZ0vemtlo1MQUVwcHB8PT05Ac+cgpRFFFcXIyMjAwAQFhYmItHRERERFQzlwUWgYGBkMvl1WYnMjIyqs1i2CIsLKxa+khMTAzS09Oh0+mgUqmqPUatVkOtVlt1fL1eLwUVAQEBdo+TyBoeHh4AjP8vgoODmRZFREREjZbL2s2qVCrExcUhISHBbHtCQgL69Olj93H79u2L06dPw2AwSNtOnjyJsLAwi0GFrUw1FZ6envU+FpE1TL9rrOchIiKixsyl61jMnj0bH3/8MVavXo1jx45h1qxZSE5OxtSpUwEYU5QmTJhg9piDBw/i4MGDKCwsRGZmJg4ePIijR49K9z/yyCPIzs7GjBkzcPLkSWzatAmvvvoqpk+f7tCxM/2JGgp/14iIiMgduLTGYuzYscjOzsbChQuRlpaGzp07Y/PmzYiMjARgXBAvOTnZ7DHdu3eXvk5KSsKXX36JyMhInD9/HgAQERGBLVu2YNasWejatStatGiBGTNm4Jlnnmmw10VERERE1NS4dB2Lxqq2fr2lpaU4d+6ctFq4O6lp7Y/Gfuymzp1/54iIyDm4joW55TtPIyNfi3mjYjnT72C2rGPh0lQounZ89913eOmll1w6hsLCQjz66KNo2bIlPDw8EBMTY7b4IhEREV17DAYRb205iTV7ziM9v9TVw2nSXN5ultxbWVkZlEolmjdv7uqhYNasWdixYwc+//xzREVFYcuWLZg2bRrCw8MxevRoVw+PiIiInKC4TA+9wZiAU1BajjA/Fw+oCeOMRRNjMBjw9NNPo3nz5ggNDcX8+fPN7k9OTsbo0aPh7e0NX19f3HPPPbh8+bJ0//z589GtWzesXr0arVu3hlqthiiKGDBgAGbOnAkA2LlzJwRBqHabOHGidJwPPvgAbdq0gUqlQocOHfDZZ5+ZjUMQBHz88ce4/fbb4enpiXbt2mHjxo21vrbExEQ88MADGDBgAKKiovDwww/juuuuw/79++v1MyMiIqLGq7C0vPJrbXkte5KzMbBwAFEUUawrd8nN1hKZTz/9FF5eXvjr
r7/wxhtvYOHChVLLX1EUMWbMGOTk5GDXrl1ISEjAmTNnMHbsWLNjnD59Gl9//TU2bNiAgwcPVnuOPn36IC0tTbpt374dGo0GN910EwDg+++/x4wZM/DEE0/gv//+w//+9z88+OCD2LFjh9lxFixYgHvuuQf//vsvRowYgXHjxiEnJ6fG13bjjTdi48aNSE1NhSiK2LFjB06ePImhQ4fa9DMiIiIi91GorWzHXsTAwqWYCuUAJWV6xL74m0ue++jCofBUWX8au3btinnz5gEA2rVrh2XLlmHbtm0YPHgwtm7din///Rfnzp2TVh7/7LPP0KlTJ+zbtw89e/YEAOh0Onz22Wc1FoipVCqEhoYCALKzszFlyhRMmjQJkyZNAgAsXrwYEydOxLRp0wAY2w7v3bsXixcvxsCBA6XjTJw4Effddx8A4NVXX8V7772Hv//+G8OGDbP4vO+++y6mTJmCli1bQqFQQCaT4eOPP8aNN95o9c+HiIiI3Et+lRkLBhauxRmLJqZr165m34eFhSEjIwMAcOzYMUREREhBBQDExsaiWbNmOHbsmLQtMjLSqq4TZWVluPPOO9GqVSu888470vZjx46hb9++Zvv27dvX7DmuHquXlxd8fHyksVry7rvvYu/evdi4cSOSkpLw1ltvYdq0adi6dWudYyUiInIngiAgMDDQ7NZUuyGZp0LpXTgS4oyFA3go5Ti60DXpNh5KuU37K5VKs+8FQZBWKRdF0eIfpau3e3l5WfVcjzzyCJKTk7Fv3z4oFOa/alc/j6Xnrm2sVyspKcFzzz2H77//HiNHjgRgDEwOHjyIxYsXY9CgQVaNmYiIyB0EBgYiMzPT1cNoFKrWVRSWltWyJzkbAwsHEATBpnSkxio2NhbJyclISUmRZi2OHj2KvLw8xMTE2HSsJUuWYP369UhMTERAQIDZfTExMfjjjz/MVlXfs2ePzc9RVVlZGcrKyiCTmU/CyeXyGoMRIiIicn9VZyyKdJyxcCX3/zRMDjNo0CB07doV48aNw9KlS1FeXo5p06ahf//+iI+Pt/o4W7duxdNPP433338fgYGBSE9PBwB4eHjAz88PTz31FO655x706NEDt9xyC3766Sd899139UpZ8vX1Rf/+/fHUU0/Bw8MDkZGR2LVrF9auXYslS5bYfVwiIiJq3PKrzFKwK5RrscaCJIIg4IcffoC/vz9uuukmDBo0CK1bt8b69ettOs4ff/wBvV6PqVOnIiwsTLrNmDEDADBmzBi88847ePPNN9GpUyd8+OGH+OSTTzBgwIB6jf+rr75Cz549MW7cOMTGxuK1117DK6+8gqlTp9bruERERNR4VQ0mWLztWoJoa7/SJqC2pctLS0tx7tw5REdHQ6PRuGiE1JTwd46IiKhmL/98FB//cQ4AcEePFlhyTzfXDugaU9vn4qtxxoKIiIiI3BZnLBoP1lgQERERuZmSkhKsXr3abNukSZPg4eHhohG5ToFZYMHibVdiYEFERETkZgoLC/Hoo4+abbvnnnuaZmBhto4FZyxcialQREREROS2CtkVqtFgYEFEREREbos1Fo0HAwsiIiIicluFTIVqNBhYEBEREZHbqlpjUaQtB1dScB0GFkRERETklgwGEYW6ysDCIAKlZQYXjqhpY2BBRERERG6puEyPqycomA7lOgwsyGmioqKwdOlS6XtBEPDDDz/U65iOOAYRERFdG0z1FQqZAC+VHAALuF2JgQU1mLS0NAwfPtyqfefPn49u3brV6xjOsGbNGjRr1sxhx6vpdRIREVHdCrXGVrM+GgW81IqKbQwsXIWBBdVKp9M57FihoaFQq9UuP0ZjIIoiysv5h4+IiKg+8itmLLw1CnhrGFi4GgMLB8rMzLT7VlJSUuNxs7KyLD7GVgMGDMCjjz6KRx99FM2aNUNAQABeeOEFs+4JUVFRePnllzFx4kT4+flhypQpAIA9e/bgpptugoeHByIiIvD444+jqKhIelxGRgZG
jRoFDw8PREdH44svvqj2/FenMV28eBH33nsvmjdvDi8vL8THx+Ovv/7CmjVrsGDBAhw6dAiCIEAQBKxZs8biMQ4fPoybb74ZHh4eCAgIwMMPP4zCwkLp/okTJ2LMmDFYvHgxwsLCEBAQgOnTp6OsrHIxnasdOnQIAwcOhI+PD3x9fREXF4f9+/dj586dePDBB5GXlyeNa/78+QCAzz//HPHx8fDx8UFoaCjuv/9+ZGRkSMfcuXMnBEHAb7/9hvj4eKjVanz22Wc1vk4iIiKqmykVyluthHfFjAVToVzH5YHF8uXLER0dDY1Gg7i4OOzevbvGfdPS0nD//fejQ4cOkMlkmDlzZq3H/uqrryAIAsaMGePYQdcgODjY7tvq1atrPG5MTIzFx9jj008/hUKhwF9//YV3330Xb7/9Nj7++GOzfd5880107twZSUlJmDt3Lg4fPoyhQ4fijjvuwL///ov169fjjz/+wKOPPio9ZuLEiTh//jy2b9+Ob7/9FsuXLzf7YH21wsJC9O/fH5cuXcLGjRtx6NAhPP300zAYDBg7diyeeOIJdOrUCWlpaUhLS8PYsWOrHaO4uBjDhg2Dv78/9u3bh2+++QZbt241GxcA7NixA2fOnMGOHTvw6aefYs2aNbV+gB83bhxatmyJffv2ISkpCc8++yyUSiX69OmDpUuXwtfXVxrXk08+CcA4s/PSSy/h0KFD+OGHH3Du3DlMnDix2rGffvppLFq0CMeOHcOQIUOsep1ERERkmWl2wketgJeKMxaupnDlk69fvx4zZ87E8uXL0bdvX3z44YcYPnw4jh49ilatWlXbX6vVIigoCM8//zzefvvtWo994cIFPPnkk+jXr5+zhu+WIiIi8Pbbb0MQBHTo0AGHDx/G22+/Lc1MAMDNN98sfWAGgAkTJuD++++XArl27drh3XffRf/+/fHBBx8gOTkZv/zyC/bu3YtevXoBAFatWoWYmJgax/Hll18iMzMT+/btQ/PmzQEAbdu2le739vaGQqFAaGhojcf44osvUFJSgrVr18LLywsAsGzZMowaNQqvv/46QkJCAAD+/v5YtmwZ5HI5OnbsiJEjR2Lbtm1mr7mq5ORkPPXUU+jYsaP0ek38/PwgCEK1cU2aNEn6unXr1nj33Xdx/fXXo7CwEN7e3tJ9CxcuxODBg216nURERGSZacbCR6OAIAgAgCKt3pVDatJcOmOxZMkSPPTQQ5g8eTJiYmKwdOlSRERE4IMPPrC4f1RUFN555x1MmDABfn5+NR5Xr9dj3LhxWLBgAVq3bu2s4bulG264QfqPBwC9e/fGqVOnoNdX/ieMj483e0xSUhLWrFkDb29v6TZ06FAYDAacO3cOx44dg0KhMHtcx44day1yPnjwILp37y4FFfY4duwYrrvuOimoAIC+ffvCYDDgxIkT0rZOnTpBLpdL34eFhdU6mzJ79mxMnjwZgwYNwmuvvYYzZ87UOZYDBw5g9OjRiIyMhI+PDwYMGADAGKRUdfXPloiIiOyXX2pMbfbWKOCtZlcoV3NZYKHT6ZCUlIQhQ4aYbR8yZAj27NlTr2MvXLgQQUFBeOihh+p1nKaq6gd1ADAYDPjf//6HgwcPSrdDhw7h1KlTaNOmjVSjUTVgqYuHh0e9xymKYo3PWXW7Uqmsdp/BUPPiOfPnz8eRI0cwcuRIbN++HbGxsfj+++9r3L+oqAhDhgyBt7c3Pv/8c+zbt0/a/+ri96t/tkRERGQ/U9qTt5pdoRoDl6VCZWVlQa/XS+kqJiEhIUhPT7f7uH/++SdWrVqFgwcPWv0YrVYLrVYrfZ+fn2/Xc9d2FbwuVdNlrnbs2DGHLU+/d+/eat+3a9fO7Ir+1Xr06IEjR46YpSpVFRMTg/Lycuzfvx/XX389AODEiRPIzc2t8Zhdu3bFxx9/jJycHIuzFiqVymwWxZLY2Fh8+umnKCoqkj6w//nnn5DJZGjfvn2tj61L+/bt0b59e8yaNQv33Xcf
PvnkE9x+++0Wx3X8+HFkZWXhtddeQ0REBABg//79Vj2PNa+TiIjoakFBQQ77bODOCqt0hULFj4MzFq7j8uLtq68413YVui4FBQX4v//7P3z00UcIDAy0+nGLFi2Cn5+fdDN9OLRVUFCQ3bfaruAHBgZafIw9UlJSMHv2bJw4cQLr1q3De++9hxkzZtT6mGeeeQaJiYmYPn06Dh48iFOnTmHjxo147LHHAAAdOnTAsGHDMGXKFPz1119ISkrC5MmTa31N9913H0JDQzFmzBj8+eefOHv2LDZs2IDExEQAxrS3c+fO4eDBg8jKyjIL/EzGjRsHjUaDBx54AP/99x927NiBxx57DOPHj68WsFqrpKQEjz76KHbu3IkLFy7gzz//xL59+6R6kaioKBQWFmLbtm3IyspCcXExWrVqBZVKhffeew9nz57Fxo0b8dJLL1n1fNa8TiIiIrLMrHjb1BVKx8DCVVwWWAQGBkIul1ebncjIyLD7Q+GZM2dw/vx5jBo1CgqFAgqFAmvXrsXGjRuhUChqzJWfM2cO8vLypFtKSopdz+8OJkyYgJKSElx//fWYPn06HnvsMTz88MO1PqZr167YtWsXTp06hX79+qF79+6YO3cuwsLCpH0++eQTREREoH///rjjjjvw8MMP19q5SqVSYcuWLQgODsaIESPQpUsXvPbaa9LMyZ133olhw4Zh4MCBCAoKwrp166odw9PTE7/99htycnLQs2dP3HXXXbjllluwbNkyO386gFwuR3Z2NiZMmID27dvjnnvuwfDhw7FgwQIAQJ8+fTB16lSMHTsWQUFBeOONNxAUFIQ1a9bgm2++QWxsLF577TUsXrzYquez5nUSERGRZQWmwEJT2W62oJSBhasIogvn0Xr16oW4uDgsX75c2hYbG4vRo0dj0aJFtT52wIAB6NatG5YuXSptKy0txenTp832e+GFF1BQUIB33nkH7du3h0qlqnNc+fn58PPzQ15eHnx9fc3uKy0txblz56QWue7E0s+MGj93/p0jIiJypgmr/8bvJzPx1t3XQW8Q8fSGfzGwQxA+efB6Vw/tmlHb5+KrubTd7OzZszF+/HjEx8ejd+/eWLlyJZKTkzF16lQAxpmE1NRUrF27VnqMqXaisLAQmZmZOHjwIFQqFWJjY6HRaNC5c2ez5zB1Jrp6OxERERG5t8IqXaHK9cZr5Ww36zouDSzGjh2L7OxsLFy4EGlpaejcuTM2b96MyMhIAMYF8a5u19m9e3fp66SkJHz55ZeIjIzE+fPnG3LoRERERORiVWssdHqD2TZqeC4NLABg2rRpmDZtmsX7LK2ObGvmVm0rLDc1O3fudPUQiIiIyAG0Wi02btxotu22226DWq120Yhco2pXKF25MbBg8bbruDywICIiIiLb5Ofn45577jHblpGRYXfXSHdVUFpZvF1aZkyBYrtZ12FgQURERERux2AQUairXCBPITMuV8BUKNdhYGGn2lZuJnIk/q4RERFVV1ymhylD3kejgLwisCgtM6Bcb4BC7vLl2pocBhY2UqlUkMlkuHTpEoKCgqBSqexe0I+oNqIoQqfTITMzEzKZzKpWyURERE2Fqb5CIROgVshQ9eNYkVYPP08GFg2NgYWNZDIZoqOjkZaWhkuXLrl6ONQEeHp6olWrVpDJ+AeSiIjIpFBrbDXro1FAEASoFXKo5DLo9AYU6srh56l08QibHgYWdlCpVGjVqhXKy8uh17NXMjmPXC6HQqHgrBgREdFV8qt0hDLxUsuhKzawgNtFGFjYSRAEKJVKKJWMhomIiIgamtRqVl35WcxLrcCV4jIWcLsIcyuIiIiIyO1UXRzPxLvia85YuAYDCyIiIiJyO4UWU6EYWLgSAwsiIiIicjv5pZXF2yamwKJQyxpYV2BgQURERERux5QK5W2WCiUHwBkLV2FgQURERERux1IqlLc0Y8HAwhUYWBARERGR27FUvO3FwMKlGFgQERERkdspsJgKxeJt
V2JgQURERERup6AiFcpHY76OBcAZC1fhAnlEREREbiYgIAAZGRnVtjUlhRVdodhutvFgYEFERETkZmQyGYKCglw9DJeyvECeqSsU2826AlOhiIiIiMjtWFwgT8VUKFdiYEFEREREbsdSjQWLt12LgQURERERuRWDQUShzkJXKA1nLFyJgQURERERuZXiMj1E0fi1j4XibQYWrsHibSIiIiI3o9PpsGfPHrNtffr0gUqlctGIGpapvkIhE6BWVF4nr5oKJYoiBEFwyfiaKgYWRERERG4mLy8PAwcONNuWkZHRZDpFFWorW81WDR5MMxYGESgtM8BDJXfJ+JoqpkIRERERkVvJlwq3za+ReyorAwmmQzU8BhZERERE5FakVrNqpdl2mUyAl8q0lgUDi4bm8sBi+fLliI6OhkajQVxcHHbv3l3jvmlpabj//vvRoUMHyGQyzJw5s9o+H330Efr16wd/f3/4+/tj0KBB+Pvvv534CoiIiIioIVlaHM+EBdyu49LAYv369Zg5cyaef/55HDhwAP369cPw4cORnJxscX+tVougoCA8//zzuO666yzus3PnTtx3333YsWMHEhMT0apVKwwZMgSpqanOfClERERE1EAsLY5nwrUsXMelgcWSJUvw0EMPYfLkyYiJicHSpUsRERGBDz74wOL+UVFReOeddzBhwgT4+flZ3OeLL77AtGnT0K1bN3Ts2BEfffQRDAYDtm3b5syXQkREREQNJL/UWLx9dY0FwLUsXMllgYVOp0NSUhKGDBlitn3IkCHV2qfVR3FxMcrKytC8efMa99FqtcjPzze7EREREVHjZAoavC2lQqkYWLiKywKLrKws6PV6hISEmG0PCQlBenq6w57n2WefRYsWLTBo0KAa91m0aBH8/PykW0REhMOen4iIiIgcq7ZUKC8pFUrfoGOiRlC8ffXCJY5czOSNN97AunXr8N1330Gj0dS435w5c5CXlyfdUlJSHPL8REREROR4tRVve6vZFcpVXLZAXmBgIORyebXZiYyMjGqzGPZYvHgxXn31VWzduhVdu3atdV+1Wg21Wl3v5yQiIiIi5yuoLRWKXaFcxmUzFiqVCnFxcUhISDDbnpCQgD59+tTr2G+++SZeeukl/Prrr4iPj6/XsYiIiIiocSmQFshTVruPXaFcx2UzFgAwe/ZsjB8/HvHx8ejduzdWrlyJ5ORkTJ06FYAxRSk1NRVr166VHnPw4EEAQGFhITIzM3Hw4EGoVCrExsYCMKY/zZ07F19++SWioqKkGRFvb294e3s37AskIiIiIocrrOgKVWuNhY6BRUNzaWAxduxYZGdnY+HChUhLS0Pnzp2xefNmREZGAjAuiHf1mhbdu3eXvk5KSsKXX36JyMhInD9/HoBxwT2dToe77rrL7HHz5s3D/Pnznfp6iIiIiMj5rFsgj8XbDc2lgQUATJs2DdOmTbN435o1a6ptE0Wx1uOZAgwiIiIiujbV1hXKFGyYZjWo4bi8KxQRERERkS1MNRa1FW+z3WzDc/mMBRERERHZxt/fH//991+1bU2BwSCiUFdz8bZXRbtZdoVqeAwsiIiIiNyMQqFAp06dXD0Mlygu08OUGe9jIRXKm8XbLsNUKCIiIiJyG6b6CoVMgFpR/aOsF9vNugwDCyIiIiJyG4XaylazgiBUu9+bC+S5DAMLIiIiInIb+dLieJYz+k0zFqVlBpTrDQ02LmJgQURERERuRGo1q65euA1UFm8DQJGOnaEaEou3iYiIiNxMeXk5Tpw4YbatQ4cOUCiu/Y92tS2OBwBqhRwquQw6vQGF2nL4eVgOQMjxrv3fPiIiIqJrzJUrV9C5c2ezbRkZGQgKCnLRiBpObYvjmXip5dAVG1jA3cCYCkVEREREbiO/YkVtS4vjmXixgNslGFgQERERkduQUqFqmbHwZstZl2BgQURERERuw7pUKAYWrsDAgoiIiIjcRl3F20DVVCh2hWpIDCyIiIiIyG0UaE3t
ZmtLhTK2nOWMRcNiYEFEREREbqNASoWquY2sl4rF267AwIKIiIiI3EZhRVeoWou3NQwsXIGBBRERERG5DWtqLNgVyjUYWBARERGR27ClKxRnLBoWAwsiIiIichtSjYUVXaE4Y9GwGFgQERERkVswGEQU6kwL5NVcvF3ZFYrtZhsSAwsiIiIicgvFZXqIovHr2oq32RXKNRhYEBEREZFbMNVXKGQC1IqaP8ayeNs1GFgQEZFTGAyiq4dARNeYQq2x1ay3RgFBEGrcjzUWrlHzHBIREZGdUnKKMWrZH7jv+lZ4ZlhHVw+H6Jrj5+eHHTt2VNt2rcu3onAbqOwYVcDAokExsCAiIoc7kJKL3OIybD16mYEFkROoVCoMGDDA1cNocKZUqNoKtwHzVChRFGud3SDHcXkq1PLlyxEdHQ2NRoO4uDjs3r27xn3T0tJw//33o0OHDpDJZJg5c6bF/TZs2IDY2Fio1WrExsbi+++/d9LoiYjIkrwSY7pCZqHWxSMhomuJNYvjAZWpUAYRKC0zOH1cZOTSwGL9+vWYOXMmnn/+eRw4cAD9+vXD8OHDkZycbHF/rVaLoKAgPP/887juuuss7pOYmIixY8di/PjxOHToEMaPH4977rkHf/31lzNfChERVZFfEVjkFpdBW852j0TkGNYsjgcAnkp55WOYDtVgXBpYLFmyBA899BAmT56MmJgYLF26FBEREfjggw8s7h8VFYV33nkHEyZMqDGPcOnSpRg8eDDmzJmDjh07Ys6cObjllluwdOlSJ74SIiKqyhRYAEBWoc6FIyGia0l+aUXxdh0zFjKZAC+VaS0LBhYNxWWBhU6nQ1JSEoYMGWK2fciQIdizZ4/dx01MTKx2zKFDh9Z6TK1Wi/z8fLMbERHZz/TmDwCZBUyHIiLHkFKh6pixACrToThj0XBcFlhkZWVBr9cjJCTEbHtISAjS09PtPm56errNx1y0aBH8/PykW0REhN3PT0RElTUWAJCRX+rCkRBdmwwGAzIzM81uBsO1X0tgbSoUwLUsXMHlxdtXV+k7onLf1mPOmTMHeXl50i0lJaVez09E1NTll1S+kbOAm8jxsrOzERwcbHbLzs529bCcztribaDKWhY6BhYNxWXtZgMDAyGXy6vNJGRkZFSbcbBFaGiozcdUq9VQq9V2PycREZkzn7FgYEFEjmFal6KuGouq+xSUMrBoKC6bsVCpVIiLi0NCQoLZ9oSEBPTp08fu4/bu3bvaMbds2VKvYxIRkW3Maiw4Y0FEDlIgpULVvo4FUHX1bXamayguXSBv9uzZGD9+POLj49G7d2+sXLkSycnJmDp1KgBjilJqairWrl0rPebgwYMAgMLCQmRmZuLgwYNQqVSIjY0FAMyYMQM33XQTXn/9dYwePRo//vgjtm7dij/++KPBXx8RUVOVzxkLInKCwoqLFtYUb3ur2RWqobk0sBg7diyys7OxcOFCpKWloXPnzti8eTMiIyMBGBfEu3pNi+7du0tfJyUl4csvv0RkZCTOnz8PAOjTpw+++uorvPDCC5g7dy7atGmD9evXo1evXg32uoiImjJRFJFfyhoLInI8e2os2BWq4bg0sACAadOmYdq0aRbvW7NmTbVtoijWecy77roLd911V32HRkREdijS6aE3VP6tzmRXKCJyEHaFatxc3hWKiIiuLVULtwHjjIU1F4WIiOoi1ViwK1SjxMCCiIgcylRf4VtxRbFML1YLNoiIbGUwiCjUWT9jUZkKxeLthsLAgoiIHMoURAT6qOHnYezcksHVt4monorL9DBNfvpa0RWKxdsNj4EFERE5VOWMhRLBPsY1gjIZWBBRPZnqKxQyAWpF3R9hpRkLrmPRYGwOLC5fvozx48cjPDwcCoUCcrnc7EZERE2bacbCz0OJoIrAIqOABdxEVD+FWuPfFm+NAoIg1Lm/N7tCNTibu0JNnDgRycnJmDt3LsLCwqw6sURE1HSYWs36eighr3iL4IwFEdVX
vg2F21X3Y/F2w7E5sPjjjz+we/dudOvWzQnDISIid1c5Y6GAh9I4k81F8oiovkwpTT5W1FcAVVfeZmDRUGwOLCIiItg2kIiIalS1xsLfUwWAi+QRUf3ZsjgewFQoV7C5xmLp0qV49tlnpZWuiYiIqsq3VGPBGQsiqidbFscDKmcsSssMKNcbnDYuqmTzjMXYsWNRXFyMNm3awNPTE0ql+XRUTk6OwwZHRETuJ7+0YsbCo0pXKM5YEDmUr68vvv7662rbrmWmvy3W1lh4qSubChXp9PDzYDNUZ7M5sFi6dKkThkFERNcKi12h8tkVisiR1Go17r77blcPo0GZUpqsnbFQK+RQygWU6UUUaculdXXIeWwOLB544AFnjIOIiK4R+SUVXaE0SgT7aIzbSstRWqaHRsm25ERkn8ribes/vnqpFcgtLmMBdwOxObAAAL1ejx9++AHHjh2DIAiIjY3FbbfdxnUsiIjIbMbC10MBlVwGnd6AzAItIpp7unh0ROSubC3eBoxpU7nFZShgYNEgbA4sTp8+jREjRiA1NRUdOnSAKIo4efIkIiIisGnTJrRp08YZ4yQiIjdRWWNhXMQqyEeN1NwSZBYysCAi+5mCA2trLKruyxmLhmFzFcvjjz+ONm3aICUlBf/88w8OHDiA5ORkREdH4/HHH3fGGImIyE2U6Q0o1ukBQMpnNtVZcJE8IqqPAqkrlPW1ElzLomHZPGOxa9cu7N27F82bN5e2BQQE4LXXXkPfvn0dOjgiInIvplazQOUiVlIBNwMLIqqHQhu7QgGVgUWhVu+UMZE5m2cs1Go1CgoKqm0vLCyESqVyyKCIiMg95ZdW5kDLZQIAVLacZWBB5DCZmZkQBMHslpmZ6ephOZWpxsLXhuJt74qWs5yxaBg2Bxa33norHn74Yfz1118QRRGiKGLv3r2YOnUqbrvtNmeMkYiI3ISpcNu3SlvHylQotpwlIvvZukAeAHipuPp2Q7I5sHj33XfRpk0b9O7dGxqNBhqNBn379kXbtm3xzjvvOGOMRETkJvItBBamlrOcsSCi+pBqLOxIheKMRcOwucaiWbNm+PHHH3Hq1CkcP34coigiNjYWbdu2dcb4iIjIjUgzFlWuKLLGgojqy2AQUaizfcaCXaEall3rWABAu3bt0K5dO0eOhYiI3Jyp1ayf2YwFayyIqH6Ky/QQRePXvjZ0hTIFIVzHomFYFVjMnj0bL730Ery8vDB79uxa912yZIlDBkZERO6n9hoLLQwGEbKKom4iImuZ6isUMgFqhfWZ/EyFalhWBRYHDhxAWVmZ9DUREZEl+SWmri2VgUWgtzGwKDeIyC0pQ3MvdhAkItsUaitazWqMC29aq7IrFNvNNgSrAosdO3ZY/JqIiKgq04xF1VQolUIGf08lrhSXIaOglIEFEdks347CbYBdoRqazV2hJk2aZHEdi6KiIkyaNMnmASxfvhzR0dHQaDSIi4vD7t27a91/165diIuLg0ajQevWrbFixYpq+yxduhQdOnSAh4cHIiIiMGvWLJSWss0hEZGzmWosfD3M3/zZGYqI6qPQzsCCxdsNy+bA4tNPP0VJSUm17SUlJVi7dq1Nx1q/fj1mzpyJ559/HgcOHEC/fv0wfPhwJCcnW9z/3LlzGDFiBPr164cDBw7gueeew+OPP44NGzZI+3zxxRd49tlnMW/ePBw7dgyrVq3C+vXrMWfOHNteKBER2SzfwowFYF5nQURkq8rF8awv3AZYY9HQrA778vPzpQXxCgoKoNFopPv0ej02b96M4OBgm558yZIleOihhzB58mQAxpmG3377DR988AEWLVpUbf8VK1agVatWWLp0KQAgJiYG+/fvx+LFi3HnnXcCABITE9G3b1/cf//9AICoqCjcd999+Pvvv20aGxER2U5ax0JjObBgy1kisoc9i+MBlYEFU6EahtUzFs2aNUPz5s0hCALat28Pf39/6RYYGIhJkyZh+vTpVj+xTqdDUlIS
hgwZYrZ9yJAh2LNnj8XHJCYmVtt/6NCh2L9/v1RcfuONNyIpKUkKJM6ePYvNmzdj5MiRVo+NiIjsI9VYeJoHFmw5S0T1YWoXa3cqlE4P0dSvlpzG6rOzY8cOiKKIm2++GRs2bEDz5s2l+1QqFSIjIxEeHm71E2dlZUGv1yMkJMRse0hICNLT0y0+Jj093eL+5eXlyMrKQlhYGO69915kZmbixhtvhCiKKC8vxyOPPIJnn322xrFotVpotZVvdvn5+Va/DiIiqmQqsOSMBRE5UkFpZVcoW5j21xtElJYZ4KGSO3xsVMnqs9O/f38AxjqHiIgIyGQ2l2dYdHXLMFEUa20jZmn/qtt37tyJV155BcuXL0evXr1w+vRpzJgxA2FhYZg7d67FYy5atAgLFiyoz8sgImryRFG02BUKqFpjwUYaRGQ7UyqUj40zFp7KykCiUFvOwMLJbF55OzIyErm5ufj777+RkZEBg8Fgdv+ECROsOk5gYCDkcnm12YmMjIxqsxImoaGhFvdXKBQICAgAAMydOxfjx4+X6ja6dOmCoqIiPPzww3j++ectBkRz5swxW/gvPz8fERERVr0OIiIyKtbpoTcYL/Zc3RWKMxZEVB+mGgkfG2csZDIBXio5inR6FGnLpb9F5Bw2BxY//fQTxo0bh6KiIvj4+JjNIAiCYHVgoVKpEBcXh4SEBNx+++3S9oSEBIwePdriY3r37o2ffvrJbNuWLVsQHx8PpdJ4day4uLha8CCXy6XCc0vUajXUav6iERHVh2m2QikX4KE0vyrIdrNEjuXt7Y1ly5ZV23atsrfGAjAWcBfp9CzgbgA2n50nnngCkyZNwquvvgpPT896Pfns2bMxfvx4xMfHo3fv3li5ciWSk5MxdepUAMaZhNTUVKmN7dSpU7Fs2TLMnj0bU6ZMQWJiIlatWoV169ZJxxw1ahSWLFmC7t27S6lQc+fOxW233Qa5nNNfRETOIq1hoVFWS1s1XSUsKC1HaZkeGiX/HhPVh4eHh01Nc9xdgdQVyrZ2s4AxGMko0LLlbAOwObBITU3F448/Xu+gAgDGjh2L7OxsLFy4EGlpaejcuTM2b96MyMhIAEBaWprZmhbR0dHYvHkzZs2ahffffx/h4eF49913pVazAPDCCy9AEAS88MILSE1NRVBQEEaNGoVXXnml3uMlIqKa5RVbrq8AAF+NAmqFDNpyAzILtIhoXv/3ECJqOgpNxdt2zlgAQJGOgYWz2Xx2TO1dW7du7ZABTJs2DdOmTbN435o1a6pt69+/P/75558aj6dQKDBv3jzMmzfPIeMjIiLrmDpC+VgILARBQJCPGhevlCCjoJSBBRHZpHKBPHsCC3nFMfQOHRNVZ/PZGTlyJJ566ikcPXoUXbp0kWobTG677TaHDY6IiNxHTR2hTIIrAgvWWRCRrexdIA+ospYFU6GczuazM2XKFADAwoULq90nCAL0ekaDRERNUeWq25bfWtgZiojsVZ/ibdNjTMEJOY/NZ+fq9rJERERAZfF2TTMWQVx9m4jsYDCIUiqUPTMWphoLdoVyvnqtcldayoWOiIjIyJQK5VtjKhRbzhI5SlZWFoKCgsxuWVlZrh6WUxSX6WFaMcBHbV9XKICpUA3B5sBCr9fjpZdeQosWLeDt7Y2zZ88CMC5Mt2rVKocPkIiI3EN+ifFNu64ZC6ZCEdWfKIrIysoyu9W0Xpe7M6UwKWQCNErbr4mzK1TDsfnsvPLKK1izZg3eeOMNqFQqaXuXLl3w8ccfO3RwRETkPqQZixr6zAczFYqI7FCorWg1q1FUWyPHGpWpUKwDdjabA4u1a9di5cqVGDdunNmCc127dsXx48cdOjgiInIf1tZYZBQwjZaIrGdqZW1P4bbxccbPq0yFcj6bA4vU1FS0bdu22naDwYCysjKHDIqIiNyP1BXKw/Kbv6nGIqtQB4Ph2kzZICLHK6xnYMHi7YZjc2DRqVMn7N69u9r2b775Bt27d3fIoIiI
yP3k15EKFeCtgiAAeoOInGJdQw6NiNxY5eJ4thduA1VqLBhYOJ3Nod+8efMwfvx4pKamwmAw4LvvvsOJEyewdu1a/Pzzz84YIxERuYG6FshTymVo7qlCdpEOmQVaBHqrG3J4ROSm6rM4HgD4cMaiwdg8YzFq1CisX78emzdvhiAIePHFF3Hs2DH89NNPGDx4sDPGSEREjVy53oAinbEwsqZ2swA7QxGR7eqzOB7AGYuGZNcZGjp0KIYOHerosRARkZvKr7KibU0rbwPGwOJ4egE7QxGR1QpKK7tC2cObMxYNxuYZi9atWyM7O7va9tzcXLRu3dohgyIiIvdiqq/wUsmhkNf81sLOUERkK1MqlE89ZyxKywwo1xscNi6qzubA4vz589Drq/cB1mq1SE1NdcigiIjIvdRVX2HC1beJyFammQYfO2csvNSVyyOYUjbJOaw+Qxs3bpS+/u233+Dn5yd9r9frsW3bNkRFRTl0cERE5B5Ma1jUVl8BVM5YMLAgImvVt8ZCrZBDKRdQphdRpC2v8wII2c/qMzRmzBjp6wceeMDsPqVSiaioKLz11lsOGxgREbkPadVtKwMLFm8TkbUKpK5Q9gcEXmoFcovLWMDtZFYHFgaDMSctOjoa+/btQ2BgoNMGRURE7iW/xLo+88EVgUUWAwsislKhqXjbzhkLAPBSGQMLFnA7l801FgsWLICPj0+17TqdDmvXrnXIoIiIyL1YW2PBGQsislV9ayyqPpaBhXPZfIYefPBBDBs2DMHBwWbbCwoK8OCDD2LChAkOGxwREbmHyhqL2t9WTDMWhdpyFOvK4amy/4MCUVPm6emJefPmVdt2LZK6QtUjsOBaFg3D5jMkiiIEQai2/eLFi2YF3URE1HRYO2PhrVZAo5ShtMyAzAItIgMYWBDZw8vLC/Pnz3f1MBpEfYu3gcrAolDLrlDOZPUZ6t69OwRBgCAIuOWWW6BQVD5Ur9fj3LlzGDZsmFMGSUREjZtpHYu6aiwEQUCwjwbJOcUVgYVXQwyPiNyUwSBK6Uv2LpAHAN4VLWc5Y+FcNneFOnjwIIYOHQpvb2/pPpVKhaioKLRt29bhAyQiosbPtPK2NW0cg3zUSM4pZp0FEdWpuEwPUTR+7aOuR1coFWssGoLVgYUpjy8qKgpjx46FRmNc5CgvLw9ffPEF3nzzTRw6dMji4nlERHRts7bdLFBZZ8G1LIioLqb6CoVMgEZpc88hCWssGobNZ+iBBx6ARqPB9u3b8X//938ICwvDe++9hxEjRmD//v3OGCMRETVyBVbWWABVO0OVOnVMROT+CrUVrWY1Cos1vtbyZmDRIGxKVrt48SLWrFmD1atXo6ioCPfccw/KysqwYcMGxMbGOmuMRETUyFXOWNT9tsIZCyKylinNsj6F2wCLtxuK1TMWI0aMQGxsLI4cOYL33nsPly5dwnvvvVfvASxfvhzR0dHQaDSIi4vD7t27a91/165diIuLg0ajQevWrbFixYpq++Tm5mL69OkICwuDRqNBTEwMNm/eXO+xEhFRdaIoSu1mbZmxYGBBZL+cnBx06tTJ7JaTk+PqYTlcoYMCC29pHYuyeo+Jamb1WdqyZQsef/xxPPLII2jXrp1Dnnz9+vWYOXMmli9fjr59++LDDz/E8OHDcfToUbRq1ara/ufOncOIESMwZcoUfP755/jzzz8xbdo0BAUF4c477wRgXKhv8ODBCA4OxrfffouWLVsiJSXF4qJ+RERUfyVlepTpjdWVdXWFAoBgH2ONHou3ieyn1+tx9OjRatuuNY5YHA+o2hXq2vsZNSZWn6Xdu3dj9erViI+PR8eOHTF+/HiMHTu2Xk++ZMkSPPTQQ5g8eTIAYOnSpfjtt9/wwQcfYNGiRdX2X7FiBVq1aoWlS5cCAGJiYrB//34sXrxYCixWr16NnJwc7NmzB0ql8Q0uMjKyXuMkIqKa5ZdUFld6quR17s8ZCyKyVuXiePZ3hALYFaqhWJ0K1bt3
b3z00UdIS0vD//73P3z11Vdo0aIFDAYDEhISUFBQYNMT63Q6JCUlYciQIWbbhwwZgj179lh8TGJiYrX9hw4div3796OszDi1tXHjRvTu3RvTp09HSEgIOnfujFdfffWajOKJiBqDqh2hrCmuNAUWWYVa6A2iU8dGRO7NEYvjVX08i7edy+auUJ6enpg0aRL++OMPHD58GE888QRee+01BAcH47bbbrP6OFlZWdDr9QgJCTHbHhISgvT0dIuPSU9Pt7h/eXk5srKyAABnz57Ft99+C71ej82bN+OFF17AW2+9hVdeeaXGsWi1WuTn55vdiIjIOrbUVwBAgJcKggAYRCCnSOfMoRGRmysorewKVR9sN9sw7G8IDKBDhw544403cPHiRaxbt86uY1x9dUsUxVqveFnav+p2g8GA4OBgrFy5EnFxcbj33nvx/PPP44MPPqjxmIsWLYKfn590i4iIsOu1EBE1RXnFplW3rXvjV8hlCPBSAWDLWSKqnZQK5bCuUAwsnKlegYWJXC7HmDFjsHHjRqsfExgYCLlcXm12IiMjo9qshEloaKjF/RUKBQICAgAAYWFhaN++PeTyyjzfmJgYpKenQ6ezfGVszpw5yMvLk24pKSlWvw4ioqbONGNhzeJ4JkEVBdyssyCi2hQ6OhVKp5cuSpPjOSSwsIdKpUJcXBwSEhLMtickJKBPnz4WH9O7d+9q+2/ZsgXx8fFSoXbfvn1x+vRpGAwGaZ+TJ08iLCwMKpXK4nHVajV8fX3NbkREZB1bVt02qVwkj4EFEdWswEFdobwqukLpDSK05YY69iZ7uSywAIDZs2fj448/xurVq3Hs2DHMmjULycnJmDp1KgDjTMKECROk/adOnYoLFy5g9uzZOHbsGFavXo1Vq1bhySeflPZ55JFHkJ2djRkzZuDkyZPYtGkTXn31VUyfPr3BXx8RUVNg6gplTatZEy6SR0TWKDCtY+GgrlBVj0mOV7/wr57Gjh2L7OxsLFy4EGlpaejcuTM2b94stYdNS0tDcnKytH90dDQ2b96MWbNm4f3330d4eDjeffddqdUsAERERGDLli2YNWsWunbtihYtWmDGjBl45plnGvz1ERE1BaYZC2uLtwG2nCUi6xSairfrmQolkwnwUslRpNOjSFsu/Q0ix3JpYAEA06ZNw7Rp0yzet2bNmmrb+vfvj3/++afWY/bu3Rt79+51xPCIiKgOlTUW1r+lcMaCiKzhqAXyAGMBd5FOzwJuJ3JpKhQREbm/+sxYsCsUEdWmcoG8+gcWXMvC+RhYEBFRveSbirdtqrFgVygiqpujFsgDqqxloWNg4SwMLIiIqF5YY0FEzmAwiJXtZh2SCmXsDFWo1df7WGQZAwsiIqoXU4cVe9rNmgopiYiuVlymh2nJCR91/bpCAUyFagguL94mIiL3lm/HjIW3WgFPlRzFOj0yC7RSigIRWUej0VRrfqPRaFw0Gucw1VfIZQI0yvpfC/diYOF0/EtORER20xtEKQfa18ZUhSAfNS5kFyOjQIuoQC9nDI/omuXj44P333/f1cNwqkKt8aKFj0YBQRDqfTzTjAXXsXAepkIREZHdCipazQK2pUIBbDlLRLXLL3Vc4XbV43DGwnkYWBARkd1MhdueKjmUctveUthylohqU+jgwIJdoZyPgQUREdktv8T4Bm1LfYUJW84SUW0cuTgeUBlYsCuU8zCwICIiu+XZsYaFSeWMBQMLIqqucnG8+neEAgDvinazTIVyHgYWRERkt/xS2ztCmXAtCyKqjSMXxwOqzlgwsHAWdoUiIiK7STMWHra/nXDGgsh+ubm5GDNmjNm2H374Ac2aNXPJeJzB1BzCEYvjAWw32xAYWBARkd3ypcDCnhoLzlgQ2ausrAy7du2qtu1aIqVCsSuU22AqFBER2c0RNRY5RVroDaJDx0VE7q/QwalQ3kyFcjoGFkREZLf61FgEeKkhEwCDCGQXctaCiMwVOLgrFAML52NgQURE
dsuraDdrTyqUXCYgwJt1FkRkmWmFbG8HdYUy1ViUlhlQrjc45JhkjoEFERHZTaqxsPOKYpA36yyIyLJCU/G2w7pCyaWvi3Rcy8IZGFgQEZHdTDUW9qRCAUCwLwMLIrLM0QvkqRVyKOUCABZwOwsDCyIispupxsKeVCigcsYio6DUYWMiomuDqSuUo2YsALacdTYGFkREZLd8zlgQkRPoyg3IrGjqEFjRQc4RvFQs4HYmBhZERGQXURSRX4/ibaDqjAUDCyKqdDqjEGV6ET4aBcL9NA47buVaFqyxcAYGFkREZJfSMgN0FZ1V7J+xMH5g4IwFEVV1PD0fABAT6gtBEBx2XNMq3oXaa2sxwcaCgQUREdnFVF8hlwnwUsnr2Nsy0yJ5nLEgoqqOpVUEFmE+Dj2ul7SWBWcsnIGBBRER2SWvSqtZe68oBvtU1liIIlffJiKj4+kFAICYMF+HHte7ouUsi7edg4EFERHZRVrDws40KKByxqKkTM9iSiKSmGYsOjo4sGDxtnO5PLBYvnw5oqOjodFoEBcXh927d9e6/65duxAXFweNRoPWrVtjxYoVNe771VdfQRAEjBkzxsGjJiIiUyqUvfUVAOCpUkjFlKyzICLA2H46q1AHQQA6hDgnFYozFs7huMbAdli/fj1mzpyJ5cuXo2/fvvjwww8xfPhwHD16FK1ataq2/7lz5zBixAhMmTIFn3/+Of78809MmzYNQUFBuPPOO832vXDhAp588kn069evoV4OEZHb0ZUbMOOrA4gN88Vjt7Sz6bGVqVD2BxaAcdaiUFuOzAItWgd51+tYRE2FSqXCXXfdVW3bteB4mjENKjrACx521m/VxJuBhVO5NLBYsmQJHnroIUyePBkAsHTpUvz222/44IMPsGjRomr7r1ixAq1atcLSpUsBADExMdi/fz8WL15sFljo9XqMGzcOCxYswO7du5Gbm9sQL4eIyO0kXbiCX/5Lx7ZjGZg6oA2Ucusnsk2tZuszYwEYW86eyypiATeRDfz8/PDNN9+4ehhOUVm47dg0KIDF287mslQonU6HpKQkDBkyxGz7kCFDsGfPHouPSUxMrLb/0KFDsX//fpSVVbYNW7hwIYKCgvDQQw9ZNRatVov8/HyzGxFRU3A6w3hlUKc34GxmkU2PlWYsPOp3jSqIi+QRURWmwu2OoY5NgwJYvO1sLgsssrKyoNfrERISYrY9JCQE6enpFh+Tnp5ucf/y8nJkZWUBAP7880+sWrUKH330kdVjWbRoEfz8/KRbRESEja+GiMg9ncoolL42XSW0liOKtwEukkdE5pw5Y1G5jgUDC2dwefH21S0KRVGstW2hpf1N2wsKCvB///d/+OijjxAYGGj1GObMmYO8vDzplpKSYsMrICJyX6cuVwks0m0LLBxVYxHMGQsiqqArN+B0xQWPmHAnpEKxK5RTuazGIjAwEHK5vNrsREZGRrVZCZPQ0FCL+ysUCgQEBODIkSM4f/48Ro0aJd1vMBhXhVUoFDhx4gTatGlT7bhqtRpqtbq+L4mIyO2Yz1gU2PRYR3SFAoCW/p4AgB0nMnA5vxQhFatxE1HTczqjEOUGEb4aBcL9HP+3gMXbzuWyGQuVSoW4uDgkJCSYbU9ISECfPn0sPqZ3797V9t+yZQvi4+OhVCrRsWNHHD58GAcPHpRut912GwYOHIiDBw8yxYmIqIorRTpkFVbOEtiaCpXnoFSoIbEhiAnzRU6RDjO/Ogi9gQvlETVVVdevsHfhzdqw3axzubQr1OzZszF+/HjEx8ejd+/eWLlyJZKTkzF16lQAxhSl1NRUrF27FgAwdepULFu2DLNnz8aUKVOQmJiIVatWYd26dQAAjUaDzp07mz1Hs2bNAKDadiKipu50pnG2IsBLhZxiHTILtMgq1CLQ27oZXEd1hdIo5Vh2f3eMeu8PJJ7NxrLtpzFjkG2tb4mamry8PKmrpsnHH38MPz8/F43IMY5XpGTGOKFwG6jaFYqB
hTO4NLAYO3YssrOzsXDhQqSlpaFz587YvHkzIiMjAQBpaWlITk6W9o+OjsbmzZsxa9YsvP/++wgPD8e7775bbQ0LIiKqm6m+onMLP1zILsL57GIcS8tHv3ZBVj2+ssai/m8lbYK88dLoznjim0N4Z9tJ9GrdHDe0Dqj3cYmuVTqdDt9++63ZtuXLl7toNI5jSsl0RuE2UCUVSqevs66XbOfSwAIApk2bhmnTplm8b82aNdW29e/fH//884/Vx7d0DCIiAk5VtJptF+wNT5Xc5sDCUTUWJnfGtcSeM9nY8M9FzPjqAH6ZcROae10bC34RkXVMMxYdnRRY+FRcCNEbRBRqy+FTz+YTZM7lXaGIiMg1TJ1X2oV4S1cHrS3g1htEFJQaUwnqW2NR1cLRndA6yAuX87V48ptDMLDegqjJyCgoRVahDjIB6BDivFSoQG/jBQtb1+6hujGwICJqokypUG2DfaoEFtYVcBeWVuYn17fdbFVeagXev78HVAoZth/PwKo/zjns2ETUuB2vuLARFegFD5Xcac/TLtgYtJy8bFsnPKobAwsioiaooLQM6fmlAIC2wd6ICTO+0Z7OKISu3FDn4031FR5KOVQKx76VxIT54sVbYwEAr/96HAdTch16fCJqnKSF8UKdkwZl0j7EG4B5u21yDAYWRERNkCkNKsRXDT8PJVo084CvRoFygyjdVxtTfYWvh3NK9cb1aoURXUJRbhDx2Lp/pECGiK5dx9NNhdvOSYMyaRfCGQtnYWBBRNQEma7UmVICBEGQiiWtSYcyfdB3VOH21QRBwKI7uqKlvwdSckow57t/IYqstyC6lklrWDh9xsL4d8+UDkqOw8CCiKgJMs1KtA32lrbF2hBY5EutZp3XUcXPQ4ll9/eAQiZg8+F0fPFXct0PIiK3pC3XS3+XYsIbJhUqNbeE61k4GAMLIqIm6FRFCkDVwKJjxYJUx9JdP2Nh0i2iGZ4Z1hEAsPDnozavDk5E7uFMRhHKDSJ8NQqE+2mc+lzNPFUI8jEuBHqK6VAOxcCCiKgJqkyFqgwsqracrSvtqLLGwvk94B+6MRoDOwRBV27A9C//QRGvMBJdc6Q0qDDfBlm0TirgZjqUQzGwICJqYop15bh4pQRAZREjAHQI9YFMAHKKdMgo0NZ6jPwS44d7Z89YAIBMJuCte7ohxFeNs5lFePHHIw49flahFnN/+A+9F23Dpn/THHpsZ0nJKcauk5muHgaRw5gWxot10sJ4V2PLWedgYEFE1MScyTAuChXgpTJb2VqjlCM60AsAcLSOlKM8qcbCOV2hrtbcS4V37+0OmQBs+OciXvr5KDIq2uXaq0Snx/s7TmPAmzvx2d4LSMsrxeyvD+LwxTwHjdo5RFHEpDX78MDqv7HvfI6rh0PkEKbFOU0pmc7WrmLG4iRbzjoUAwsioibmVEb1+goTUzrU8TpW4G7IVCiTXq0DMGtQewDAqj/O4cbXd+DJbw7ZXHdhMIj4Nukibn5rJ9787QQKteXo3MIXvaKbQ1tuwP8+24+swtpnbFzpn+QrUirb7lNZLh4NkWOYZixiGmjGorIzFGcsHImBBRFREyPVV4TUHFjU9WFdmrFowMACAB69uS1W/F8PxEX6Q6c34Nukixj+zm6MX/UXdp3MrLM25I9TWbj1vT/w5DeHkJZXihbNPLB0bDdsnH4jVk6IR+tAL1zKK8W0L/5Bmb7uhQJd4dukVOnr/Q6esfjp0CV8sPMMDAa29qWGk1FQiqxCHWRC5Qd+Z2tfkQqVllcqXSih+muYOWwiImo0TMWKphzjqqxtOZvfQF2hriYIAoZ1DsOwzmH4J/kKVu0+h1/+S8PuU1nYfSoL7UO8MfnG1ritWzg0Srn0uBPpBVj0yzHsPGGsS/BRKzD95raY2CdK2s/PQ4mVE+Iw5v09+PtcDl76+SgWju7coK+vLqVlevz87yXp+wPJuSjTG6CU1/86YYlOjye+PgSd3gBvjQLjb4is9zHJ
eZRKJfr3719tmzsypUFFBXrBQyWvY2/H8PNUIthHjYwCLU5nFKJHK/8Ged5rHQMLIqIm5nRFKlQ7C6lQHStWvD2bVYTSMr3Zh/Oq8hpgHYu69Gjljx7j/JGSU4xP/jyP9fuScfJyIZ7e8C/e+O04JvSOwtBOofjkz3P4en8KDCKgkAn4vxsi8fgt7czqS0zaBvvg7bHdMGXtfqxNvIBO4b4Y27OVC16dZQlHL6OgtBwtmnmgSFeO3OIy/Jeah+4O+FCUdOEKdBWzNK9tPoabOwajRTOPeh+XnKNZs2bYuXOnq4fhEMcrLmTEOHlhvKu1D/FBRoEWpy4XMLBwEKZCERE1IaVleiTnFAMA2lpIhQr11aCZpxJ6g1hrG8b80obrClWXiOaeeHFULBKfuwXPjeiIMD8Nsgp1WJJwEkOX/o6v9hmDiuGdQ5Ewuz/m39bJYlBhMjg2RKrlmPvDEfyTfKWhXkqdvk26CAC4o0cLxEc2BwDsP++Y8SWerazXKNLp8fz3h7naOTUI0wxpTFjDpEGZSAXcbDnrMAwsiIiakLOZRTCIxoAgyFtd7X5BEKSrhrWlQ1XWWDSeiW9fjRIP39QGvz89EO/c2w1dWvgBALq3aoZvp/bGB/8XJ3W9qstjN7fF0E4h0OkNmPpZEi7XswOVI1zOL8XuU8ZUrjt6tETPKOMV1r8dVGex96zxOP/r3xoquQw7T2Tih4OpdTyKqP6OpxtnURuqcNvEVM/BlrOOw8CCiKgJOVUlDaqmRahMb+41tZwtLdNDV25MmWkMMxZXU8plGN2tBTY+2hf/LRiK7x7pg/io5jYdw7R2RvsQb2QUaDH18yRoy/VOGrF1fjiQCoMIxEf6IzrQCz2jTTMWOfWeWSjSluNQSi4A4P96RWLGoHYAgAU/HUVmHWuaENWHtlyP0xUNJTo2eGDBRfIcjYEFEVETcrqWjlAmpnQEU/vHq5kKt2UC4KVqPDMWVxMEAd5qhd2r+HqrFVg5Ph6+GgUOJOfixR+OuCw1SBRFKQ3qzriWAIDO4X7QKGW4UlyGM5n1+2CUdOEKyg0iWjTzQERzTzx8U2vEhvkit7gM839y7IKERFWdyShCuUGEr0aBcD9Ngz5324oGFun5pdIsLNUPAwsioibEdGWurYWOUCaVLWcLLH6QrrqGhUxm34d2dxEV6IX37u8BmQCs35+Cz/decMk4Dqfm4VRGIdQKGUZ2DQMAqBQydItoBgD4+1z96iwSz2YDAG5oHQDAOOvzxl1dIZcJ2PRvGn47kl6v4xPVxJRy2THM1+6LAPby81Ai1NcYzJiaWlD9MLAgImpCTtXSEcqkXYg35DIBeSVlSMurXlvQGDpCNaT+7YPw9LCOAIypQX9VfAhvSBsqZiuGdAo1+7lfH1WZDlUfiWeMr6l3mwBpW+cWfvjfTa0BAC/88B/yinlFtzEpKCjA9OnTzW4FBe734dgUWMQ2cBqUCQu4HYuBBRFRE6ErN+B8trEjVG2pUGqFHG2CjEXOlgq480saT0eohvK/m1pj1HXhKDeImPbFP7iUW9Jgz60t1+PHQ8a1K+6qSIMyMdVZ1KeAu1BbjsOpeQCAG1qb16I8fks7tA7yQmaBFq9sPmr3c5DjlZaWYvny5Wa30lLXNxmwlalwu2Now3aEMmEBt2MxsCAiaiLOZxdBbxDhrVZI0/81qW0F7sbYEcrZBEHAG3d2RWyYL7KLdHj4s/0oLWuYYu4dxzOQW1yGEF81bmwbaHZf91b+kAnAxSslSMuzL9jZdz4HeoOIiOYeaOnvaXafRinHG3d2hSAAX++/iD9OZdVwFCLbiaJYpdWsa2YsWMDtWAwsiIiaiMr6ipo7QplUrbO4mqnGoinNWACAh0qOlRPi0NxLhf9S8xus3uLbJGPL1zHdW0B+VU2Lt1qBTuHGtrr77FzPYq+pviI6wOL98VHNMaFiFe5nv/sXRdpyu56H6GqZhVpkF+kgEypnDhpaO85YOBQDCyJyibyS
Mnz5V3KjWB+gqZA6QtVSX2FS64xFcdOqsaiqpb8nHru5LQBg67HLTn++7EItdp7IAADc1aOlxX3iK9az2HfOvnSovRbqK6729LCOaNHMAxevlODN307Y9TxEVzNduIgK9IKHSu6SMZj+HmYUaFlH5AAMLIjIJeZvPILnvj+MmxfvxEe/n0WZ3uDqIV3zpMLtWuorTEwtZ89lF6FYZ36FumpXqKZoYIdgAMYVrwtKnftB5MeDl1BuEHFdSz/pyurVTAXc++yosygoLatSX1FzYOGlVmDRHV0AAJ8mnkfSBccsykdN23EXp0EBgI9GKbW5PcnOUPXm8sBi+fLliI6OhkajQVxcHHbv3l3r/rt27UJcXBw0Gg1at26NFStWmN3/0UcfoV+/fvD394e/vz8GDRqEv//+25kvgYhsVKwrx6//GdtXFun0eGXzMQx/Zzf+PM38bWeqnLGoO+Ug2EeDQG8VRLF6txRTjUVTS4UyiQr0QnSgF8oNotN/Z69eu8IS0+J/Jy4X2HzFdd/5HBhEIDLAE+HNPGrd96b2QbgrriVEEXj6238brMaErl1SfYWLCrdN2jIdymFcGlisX78eM2fOxPPPP48DBw6gX79+GD58OJKTky3uf+7cOYwYMQL9+vXDgQMH8Nxzz+Hxxx/Hhg0bpH127tyJ++67Dzt27EBiYiJatWqFIUOGIDU1taFeFhHVIeHoZZSU6REZ4Ik37uyK5l4qnM4oxLiP/8L0Bu6401SU6w04m1kEwFhjYY2a0qFMXaF8NU2nePtq/dsHAQB2nsh02nMcS8vH0bR8KOUCRnUNr3G/IB81ogO9IIpAUrJtMwl7zxr3713LbEVVL4yMQaC3Gmcyi7Bs+2mbnovoaqaOUK6csQCA9sEs4HYUlwYWS5YswUMPPYTJkycjJiYGS5cuRUREBD744AOL+69YsQKtWrXC0qVLERMTg8mTJ2PSpElYvHixtM8XX3yBadOmoVu3bujYsSM++ugjGAwGbNu2raFeFhHVYeNBY+vM0deF456eEdjxxAA80DsSMgHYdDgNt7y1C+/vOA1tOa+IOkpyTjF0egM8lHK0qOPKtImp/WO1wKKJp0IBwMCOxnSoHScynLYat2ntils6hsDfS1Xrvj0r6ixsXSjPtH5FbWlQVTXzVOHlMZ0AAB/sOoMjl/Jsej4iE225XppF7ejqwKJixuIUU6HqzWWBhU6nQ1JSEoYMGWK2fciQIdizZ4/FxyQmJlbbf+jQodi/fz/KyixP/xYXF6OsrAzNmze3eD8AaLVa5Ofnm93I9fQGsVpuN7m/K0U67DppvMp7WzfjVVg/TyUWjO6Mnx/rh55R/igp0+PN305g2NLdUuEq1c+pijfwNsFeVq+WXdOMRWW72aYbWPSKbg6NUobL+VqLnbPqq0xvwA8VAXhtaVAmPe1YKC+vpEwKDKwNLABgWOcwDO8cCr1BxNTPk/DetlM4mJILvcE5ARZdm05nFKLcIMJXo5BqHFyFi+Q5jssCi6ysLOj1eoSEhJhtDwkJQXp6usXHpKenW9y/vLwcWVmW81yfffZZtGjRAoMGDapxLIsWLYKfn590i4iIsPHVkDNMWP0Xrn9lG34/6bxUA2p4m/9LQ7lBRKdwX7S9Ktc/NtwXX/+vN94eex2CfNQ4l1WEiZ/sw8Nr9yMlp9hFI7422FJfYWIKLI6nFZhdlW+q7War0ijl6NPGuKbEzpOOD35/P5mJrEItArxUGNAhqM79TYHFvxfzrK592HfOWF8RHeiFUBs/2C0Y3QmB3iqk5JTgrYSTGPP+n+jxUgKmf/EPvvo7GalMZ6Q6HE+rTIOqq/21s5kaI2QWaJFbrHPpWNydy4u3r/5lEkWx1l8wS/tb2g4Ab7zxBtatW4fvvvsOGk3NfzTnzJmDvLw86ZaSkmLLSyAnSM4uxp+ns1GoLcfkT/cj4ajz2zpSwzClQd12neWccUEQcHv3ltj+RH9MvjEacpmALUcv45YluzB7
/UEknsmGgVdGbXaqoijR2voKAGgT5A2lXECBthwXr1R+UGzK7WarGljxgX/nccdf/NjwjzENanS3FlDK636rjgzwRJCPGjq9AYdScq16Dmn9ChtmK0yCfTT4deZNeOX2zhjWKRQ+GgXySsqw6XAanv3uMPq+th23vLUT8zcewfbjl91q7QuDQcSF7CKczSxEel4p8krK2LXOCVy9MF5V3mqFlCLKWYv6cVnlXWBgIORyebXZiYyMjGqzEiahoaEW91coFAgIMP/DuHjxYrz66qvYunUrunbtWutY1Go11Gq1Ha+CnOW3I8bzrJAJ0OkNeOTzJLw9thtG1fBhlNzDpdwS/F2RqlHXufTRKPHCrbG4p2cE5v14BIlns/HdgVR8dyAVrZp74q64lrgzrqXV9QJN3Skb1rAwUSlkaBvsIxURRzT3hMEgoqDiQ2JTnrEAgAEdggEcQVLyFeSVlDns55FbrMPWo8ZZkDvjWlj1GEEQcH1Uc2w6nIb9F66glxXBQqIUWNScKlybQG81xvWKxLhekSjXG3DoYh52n8rE7lNZOJB8BWcyi3Amswhr9pyHUi6gcws/dAr3RWyYH2LDfdEhxMdlaxeYiKKI5Jxi/HsxD4dT83AoJRdHLuWj0EIgpJAJ8FDJ4amSw1OlgIdSDg+VHN5qBf53U2v0uWpVdKpdZeG2aztCmbQL8UZqbglOXi7A9dH2/Z8gFwYWKpUKcXFxSEhIwO233y5tT0hIwOjRoy0+pnfv3vjpp5/Mtm3ZsgXx8fFQKiv/oL/55pt4+eWX8dtvvyE+Pt45L4CcyhRYPDciBv9ezMUPBy9hxlcHoC034C4r8o2pcfr530sQReD66OZ1trY0aR/igy+n9MKBlFx8s/8ifjp0Cck5xViScBJvbz2JG9sG4p74CAyODYFG6doPKY2V3iBWpkLZuLptTJgxsDiWlo+hnUJRoC2HKSvK16PpdoUCgIjmnmgT5IUzmUX441QWRnYNc8hxf/o3DTq9AR1DfaRVta0RH+WPTYfT8Pe5HEwfWPu+ucU6HK24YmxtR6jaKOQyxEX6Iy7SHzMHtUdeSRkSz2Tj91OZ+P1kJi5eKcGB5FwcSM6VHiMTgNZB3ogN80VsuK/0b6C3cy70iaKItLxS/HsxVwok/r2YJ9UMVaVWyKCSy1BcppdqR8oNIgpKy1FQWg5Aa7b/kUt52DZ7APw8m3awbS1RFKUZi46hrp+xAIzvNTtPZEqzu2Qfl74rzJ49G+PHj0d8fDx69+6NlStXIjk5GVOnTgVgTFFKTU3F2rVrAQBTp07FsmXLMHv2bEyZMgWJiYlYtWoV1q1bJx3zjTfewNy5c/Hll18iKipKmuHw9vaGt7f1V+rIdTILtEhKNnY2GdElDA/0iYJGKcdX+1Lw5DeHUFKmx/gbIl08SrLHxkO1p0HVRBAE9Gjljx6t/PHirbH45b80fL0/BXvP5mD3qSzsPpUFPw8lxnQLx93xEejcwvoPY01B6pUSaMsNUClkiPC3bYYnNswX3yFVyofOr/gQplHKoFYwkBvYIRhnMs9hx4kMhwUWprUrbL2IYqqz+OfCFegNIuS1FOn/fS4Hogi0DvJCsK/jC2f9PJQY1jkUwzqHSrMCB1NycTQtH0cvGW/ZRTqczijE6YxC6W8DAAT7qNE+xAetAjwRFeCJVs29EBXoiVbNPeGpqvtji67cgOScYpzNLMTZLGNK09nMIpzNKkJOUfX8eZVchpgwH3Rt2QxdWvqha0s/tA3yhkIugyiK0OkNKNHpUVKmR7FOjxJdxb9lepToyvHmbydwJrMIb245jpfHdHHYz/BSbgkCvFXX5P+zzEItsot0kAmVHZlczTSby1So+nFpYDF27FhkZ2dj4cKFSEtLQ+fOnbF582ZERho/NKalpZmtaREdHY3Nmzdj1qxZeP/99xEeHo53330Xd955p7TP8uXLodPpcNddd5k917x58zB//vwGeV1UPwlHL0MUgesimkkF
hYvu6AKNUo41e85j7g//oVSnx5SbWrt4pGSL0xmF+C81HwqZgBFd7P8A5qGS444eLXFHj5a4kF2Eb5Mu4tuki0jLK8WniRfwaeIFtAnyQlSAF4J81AjyUSPQWy19HVTxtZe66VxtN7VQbB3oBYUV+fpVSZ2h0o1XF6WOUE28vsJkQIdgfPzHOew6mQmDQbS641ZNTmcU4lBKLuQyAaO7WZcGZRIT5gtvtQIF2nIcT8+vdbbDlAbliNmKugiCgMgAL0QGeEmvSRRFZBZoccQUaKTl49ilfJzLLkJGgRYZBVrAwjIZwT5qRAV4SUFHRHNPFGn1ZkFEypWSGjtUKWQC2of44LoIP3Rp0QxdW/qhfYgPVArL/y8EQYBaIYdaIUezGl6fn4cK9320F1/8lYy74iLQLaKmPa23bPspLN5yEgqZgDZB3ugY5oOOob7oGOaDmFBfhPiqIZfLERsba/Y4udw9ghBTJ7WoQC+Xp8OZsOWsY7j8nXXatGmYNm2axfvWrFlTbVv//v3xzz//1Hi88+fPO2hk5Cq/VqRBDe1UWWsjCALmjYqFp0qO5TvP4JXNx1BSpsdjN7d1eTcJso7piuRN7YPQvI6e/NaKDPDCE0M6YOag9vjjdBa+3p+ChCOXpdzu2ngo5QjyUaNdsDeW3NPtmk5hOGVnGhRQuZbFhexiFGrL2RHqKj2j/eGpkiOzQIujafn1ni0zFW0PaB+EIB/bUoLkMgE9Iv3x+8lM7DuXU2tgYVoYz57CbUcQBAHBvhoE+2owsEOwtL2oIig6m1mEC9nFuJBTjAvZRTifVYT80nIp6Pi7jra6Xio5ooO80DrQG62DvNA6yButA73QNtjb4SmTvdsE4I4eLfDdP6l4/vvD+HF6X5sD+Kr2nc/BkoSTAIzpVycuF+DE5QL8iMpZHX9PJTqG+uKe175GTKgxhSwmzLfWWarGpDEVbpuYGltkFeqQU6Rz2PtUU+PywIKoqvzSMiSeMbYOHtop1Ow+QRDw9LCO8FTJsXjLSSxJOIlinR7PDOvA4KKRE0URGw+mArA9DcoacpmA/u2D0L99EK4U6ZB04QoyC7XILKi8ZRVqkVmoRUa+1pjCUKZHck4xknOK8eXfyXhkQBuHj6uxMK0ma0vhtkmAtxrBPmpkFGhxIj1fSoVqymtYVKVWyNG3bSASjl7GjuMZ9Qos9AYR31UEFtasXWHJ9VEVgcX5K5jYN9riPleKdNIHO1cFFjXxUisQF9kccZHVi2dzi3W4kF2M89lFSM4uxvnsYqTkFMNTLa8MIAKNQUSIr7pB3xeeGxGDbccycORSPtYmXsCkGy3/7OuSV1KGmV8dhEEE7ujRAk8O6YDj6fk4llaAY2n5OJ5egLOZhbhSXIbEs9nSzBNgDDb6tw/CwI7BuKldUJ2LKrrS8Yrfv9hGFFh4qRVo6e+Bi1eMBdyN7f+Gu2BgQY3KjuMZKNOLaBfsjTZBlj8EPXpzO2iUcry86RhW7DqD0jI9Xrw1tt4pCOQ8h1PzcD67GBqlDINjLXd9cxR/LxUG1fEcRdpyZBZo8dOhS3gr4SS+2Z+Cqf1bX7MB6umKqX17AgvAeFUxoyATR9MKoK64EssZi0oDOgQh4ehl7DyZicduaWf3cf48nYXL+Vr4eShxS0xw3Q+wIL6izmLf+Zwa27f/dc54tb9tsLfNsyKu1MxThWaeKlzngFQjRwv0VuOZYR3x3PeH8daWExjRJczmtUFEUcRz3x9Gam4JIgM8sXB0Z3irFQhv5oGbO1b+TSstM65YbWyqUIDj6fk4fDEPV4rL8MPBS/jh4CXIBKBbRDPc3DEYAzoEo1O4c9aKKNSWY/fJTGw9loErxTrc3r0FhncOrXPGxpQKZZoRbSzah/jg4pUSnGJgYTcGFtSo/CalQYXWut/kfq2hUcrxwg//Yc2e8ygt0+OV27u4zTRwU/NjxdoVg2NDG0Vtg5daAS+1Ag/eGI0Pdp3B2awi7Dt/5ZpsMSiKYpVU
KPsDi10nM3EsLR/RAV4AAF+N689jYzGgIpXnQPIV5Bbr0MzTvivFX+83rqF023XhdhfsdotoBqVcQEaBFsk5xYisOF9V7W3A+oqm5N6eEfgmKQUHknOx8OcjWD4uzqbHf5N0EZv+TYNCJuCde7vDu4a/lRqlHJ1b+JnNjpXrDfgnORc7TmRgx/EMHE8vwD/JufgnOReLt5xEsI8aAzoEYWCHYPRtF1ivGqnU3BJsO3YZW49lYO+ZbOiqrPGx/XgGIpp7YEq/1rg7LsJi/YS2XI8zmca/SY0pFQow/o3cfjyDBdz1wHcGajRKy/TYUbHQVF2BBQD83w2R0CjlePrbQ/hqXwq05QYsuee6a/aqs7vSG0T8VFFfMbqRrUPirVbg1q5h+Hr/Razfl3JNBhaX8kpRrNNDIRMsfsi0hqnP/LG0fARUpFdwxqJSi2Ye6BDigxOXC/D7qSy70v3OZBZi8+E0AMDYnhF2j0WjlKNry2ZIunAFf5/LqTWw4BVZx5LJBLwypgtGLfsDmw+nY8eJDLP6kdqczSzE/I1HAACzh7S3uQBcIZfh+ujmuD66OZ4Z1hGXckuw80QmdpzIwJ+ns5BRoMXX+y/i6/0XIQhAmK8GkQGmblteiArwrCiw96x28cdgEPFvap4UTJjS6EwiAzwxKCYEnio5Pt97ASk5JXjxxyN4O+EkJvSOwoTekQio0kL4dEYhyg0ifDUKhNk4q+Ns7YONf+tOsuWs3RhYUKOx+1QWSsr0aNHMA51bWHcV4664ltAoZZj51UF8fyAVo7uFS1cPqXH462w2MgqM6R03tQ9y9XCqGdszAl/vv4hNhy9h3m2x11y3I1NP9uhAL6tWcLbElAd9Ir0AncKNX7PGwtyADkE4cbkAO49n2BVYvLvtFAwiMCgmuN4F4PFR/ki6cAX7z1/B3fHmQUp2oVZamMzehfGoZrHhvniwTxQ+/uMcXvzxPyTM6l9nsbiu3IAZXx1EsU6P3q0D8L+b6l/vFd7MA/f3aoX7e7WCtlyPfeeuYPvxDOw8kYGzWUW4lFeKS3mlZjUaJoHeakQGeCIywBNyQcDOk5nILKhct0MmAHGR/rglJgSDYoLRJshbuqA3bUBbfJOUgo92n0VKTgne2XYKK3adwT3xEZjcLxqRAV5S6+qYMOekZ9WHaVbXNMtLtmNgQY2GKQ1qSKcQm/7Y3No1HP9cyMXqP8/ho91nGVg0MqZuUCO6hNbY0tGVerTyR9tgb5zOKMRPhy5hXK9ra42U0/VMgwKMQYlKIUOxTo//Uo1XKzljYW5Ah2B8+PtZu9rOns4okP6fzBzUvt5juT6qOT7cdRb7LHRO+ruivqJDiI/ZVWRynJmD22PT4TSk5JRg2fbTeHJoh1r3f2vLCRxOzUMzTyXeHtvN6pTeoqIivPnmm2bbnnrqKXh5mc9SqRVy3NguEDe2C8SLo2KRVajFheyiikL4YiRnFxn/zSlGTpEOWYXGZhdJF65Ix/BWK3BT+0AMignBgA7BNXZM8lDJMaF3FO6/vhV+PZKOD3edxeHUPHy29wK++OsChncOQ1lF6lRjS4MCKjtDmX4Ozlqs8VrGwIIahXK9AVuPXQZgXRrU1SbdGIVPE8/jz9PZ+C81jwukNRLacr2U3nHbdbb15G8ogiBgbHwEXtl8DF/vS7nmAgtTR6i2wfYXSSrkMrQP8cZ/qfk4nJoHgOtYXC0+yh/eagWyi3Q4nJpnU4HxO9tOQxSBIbEhDvnbFV/RUelsVhEyC7RmBdqJUhoUZyucxVutwLxRnTD18yR8+PsZjOkeXuP/vz9OZeHD388CAF6/s6tNBd/FxcVYsGCB2bbp06dXCyyuFuhtXNvHUuetvJIyJGcX40JOkdRiuk+bAPSKDrDpwpBCLsOtXcMxsksYEs9m48NdxqB7U8X7AVCZYtmYeKoUiGjugZScEpy6XMjAwg6N7/IhNUl/n8tBbnEZmnuppNVjbdHS3xO3Vqx6u7LijzS53q4T
mcgvLUeIr7pR1y/c3qMFFDIBhy7mVcsfdnenM+1vNVtVTKjx6qJp4TGmQplTymW4sW0gAGDHiQyrH3fycgF+/tdxsxUA4OepRIeKNUuSLpjPWiSeqSjcbsP6Cmca2ikEN3cMRplexAs//AdRrL5gX3ahFrO/PggAGNerlV0X1RzNz0OJLi39cGvXcEwf2BbPDOuIfu2C7J5tFgQBfdoE4tNJ1+OXGf1wR3fj31qlXLDrvb4hmOosuFCefRhYUKNgSoMaFBNsd2enhytW4t50OA0XrxQ7bGxkP1N6x6iu4Y26Y1egt1pqg7t+X4qLR+M4oihKNRb1SYUCqqct+HpwwvtqAzsaa4h2nMi0+jHvbDsFUQSGdQpFbLjjUkN6RvsDAP4+V5nOklWolXLHr49mYOFMgiBgwW2doFHKsPdsDr4/kGp2vyiKeGbDv8go0KJtsDdeGBlbw5GuHTFhvlgythv+fPZmbJnVH61raCnvaqaFRFnAbR8GFuRyoihiy1H706BMOoX74ca2gdAbRKz645yjhkd2KtSWS+lto7s1zjSoqu6p6MTzw8FUaMv1Lh6NY2QWaJFfWg6ZYKyTqI9qgQVToarp395Y3/XvxVxkF2rr2Bs4np6PTf8aU0NmDLJ//QtLTFeD91eZsTB1g+oY6sNVhRtARHNPPF6xrskrm44ht1gn3ff53gvYeiwDKrkM797b3WJb1mtViK+m3n+PnKl9xUUYtpy1DwMLcrl/L+YhLa8UXirjCrb1YZq1WL8vBXnFZY4YHtkp4Wg6SssMiA70srrLlyvd1C4IYX4a5BaXYcuRy64ejkOYrk5HBXjZvS6CydUr5LJ4u7pQPw1iwnwhisDvp+qetXhn6ykAwMguYQ4vZDUFFkcu5aNIWw6AbWZdYfKNrdEu2BvZRTq8/usJAMbuai9vOgYAeHZ4R4fOVFH9ta+YsTh1ucBiChvVjoEFudyvFWlQAzoG19mWry792gUiJswXxTo9Pv/rgiOGR3YyLYp323Xhja6loCVymYC741oCqFyozN2Z0qDa1rO+AjDm7YdXKSxljYVlAzsY06F21pEOdfRSPn75Lx2C4PjZCsDYbrRFMw/oDSL+STamQ7G+ouGpFDK8PKYzAGDd38nYcyYLj687AG25AQM6BOHBvlGuHSBVY2yfC1wpLkNWoa7uB5AZBhbkctautm0NQRDw8E3RAIBP/jSuyE0NL7tQi92nsgAAt3VrXIvi1cbU83/3qSyk5Lh/nU59V9y+mumquiAAPo1gBfXGyNTuetfJTKnQ3ZJ3tp0EYJytMF0hdbSeUcY6i33nryCjoBRnMosgCECvRtxI4VrUq3UA7qq4aPHA6r9x4nIBAr3VWHw3F3RtjDxUcrRq7gmg8uIMWY+BBbnU6YwCnM0sgkouk6701detXcMR5qdBVqEWP1xVMEcNY/N/6dAbRHRp4Yc2jbRAz5KI5p7o29Z4NfebpIsuHk39SYFFPVrNVtWxoj2kj1ph0zoNTUmPVs3go1Egt7gMB1NyLe5z5FIefjty2ThbcYvjZytMelYEEPvO5WDvWWOtRUyoL5p5sr6ioc0Z3hHNPJUo0xuDzcV3d2Ur00asHVfgthsDC3Kp3ypy2fu2DYCPg4pBlXIZJvU1zlqs3H0WhlquGpJzbDxoDOjsWYHY1e6pmLX4dn9KrVec3YFpcTxHpEIBlTMWfp5Mg6qJQi7DTe2MF0l21dB2dmlFbcVt14VLHWic4fqKOosDKVew+6QxNYv1Fa4R4K3G/FGdIAjAtAFtuJBrIycVcHMFbpsxsCCX+vU/x6VBVXXv9RHwUStwNrMI245b31Oe6i81twT7zl+BIAC3Xhfm6uHYbGinUPh5KHEprxS7rSjAbayyC7XIKdJBEOCwWaOb2gfhupZ+uCcuwiHHu1YN6FBz29nDF/OQcPQyZAKkjkHO0ibIG808lSgtM+DHitbPrK9wnTHdW+Dw/KF4elhHVw+F6lC1gJtsw8CCXCY1
twSHU/MgE4BBFWsIOIqPRon7b2gFAFj5+xmHHptq91PFB5he0c0R5ufh4tHYTqOU4/buxva47lzEbUqDaunv4bBWlr4aJX589EY85uQPxO6uf0VgcTg1D5kF5m1nl2411laM7tbC6WmCMpkgrcKtKzdAECpnMcg1vFmb5BbaVWk5y85QtmFgQS6zpaJoOz6yuVNyTSf1jYZSLmDf+StSVxR3cyK9AHev2IPJn+7Dl38lIz2v1NVDqlNlN6jGv3ZFTUzpUAlHL1u1HkFj5Oj6CrJesI9GarG862TlrMWhlFxsO54BmQA8dnPbBhnL9RUL5QFAp3BfprERWaFNkDdkApBXUlbt4gDVjoEFuYypG9SQTo6drTAJ8dVIC7Ot3HXWKc/hTDtPZODOD/Zg3/kr2HosA899fxg3LNqGEe/sxuLfTiDpwhWX1QCIoojSMj2uFOlw8UoxTl0uwMGUXPx06BKOpeVDKRcwvLNj09saUmy4L7q29EOZXqy2Yq67OG1acdtB9RVkm4EVOfQ7qtRZmGYrxnRv0WCrDsdXmaG4gattE1lFo5QjMsC4iB8XyrMN5+TIJbILtfj7nLFLiaPrK6p6+KbW+DbpIn47mo5zWUWNerXPqj5LPI95G4/AIALXRzdHv7aB2H4iAwdTcnE0LR9H0/KxbMdpNPdSYUD7IAzsGIyb2gfVuGiZKIoo0ulRUFqGgtJy5JeUoUBbjiJtOYp1ehRry1Gk06NYV44ibcW/V20v1upRpKvYX6evNajp3z4I/m6+su898RH492Ie1u9LwUM3RjeKtpAZ+aVIzS1BiK8GQT5qKOU1Xxs65eDCbbLNgA7BeG/7aew+mYlyvQGHU/Ow40Qm5DIBj9/ccKlkncP9oFHKUFpmYH0FkQ3aBnvjXFYRTl4uwI3t6rd4b1PCwIJcYtuxDBhE49R8REW/aGdoH+KDgR2CsONEJj7efRav3N7Fac/lCHqDiJd+Poo1e84DAO6Ka4lXb+8ClUKGx25ph+xCLXadzMS24xn4/WQmcop0+O5AKr47kAq5TEBcpD98NQrkm4KH0nIUlJahUFsOZ01uqBUyeKkV8FDK4aWWo5mnCjNuae+cJ2tAt3ULx8ubjuJURiEOpOSiRyv/uh/kJOV6Az78/Sze2XoKOr1B2h7orUKwjwbBvmqE+GgQ4qtGsK8GIb4anEivmLFwYtchqlm3iGZo5qlEbnEZDqTkYtn20wCAO7q3QFQDXuBQKWR4YWQs/kvNw03tHdPSmxoHQRAQGBhYbRs5RvsQbyQcvYxTGSzgtgUDC6qRrtwAuUyA3An96h25KF5dHr6pDXacyMS3SRcxa3D7Rts7vFBbjsfXHcD2ii5WTw/rgEf6tzF7owjwVuOOHi1xR4+WKNMbkHThCnYcz8C24xk4nVEozQLVRCET4KNRwEejhI9GAS+1Al4qOTwr/jV+r4CnWm78t2Kbh0oObyl4MG433hRO+f1oDHw1SozoHIbvDqTi630pLgssTmcU4ImvD+HQxTwAxmAit7gM5QYRWYU6ZBXqcDSt5sdzxsI15DIBN7ULwsZDl7B060n8eTobCpmAxxpwtsLk/26IbPDnJOcLDAxEZqb7dq5r7Co7QzEVyhYMLJqYEp0emQVaZBZqkVWoRWaB+b9ZhTrp62KdcdVqQTB+IJXLBChkMijkQrXv/T1VuL9XK4zp1gIqRe2lO4Xacuw+bVyVuSECixtaN0fXln7492IePku8gFmDG9/V9NTcEjy0Zh+OpxdArZDh7bHdMKJL7a1alXIZbmgdgBtaB2DOiBik5BRjz5ksGESYBQ++GiV8K77XKGW8omWDsT0j8N2BVPx06BLm3hoLrwbs6KI3iPh491m8lXASunIDfDQKzB/VCXf0aAFRBK4U63A5X4vLBaXIyC81fl3xb2ZBKTIKtBjQIYhdaFxoQAdjYPHn6WwAwJ09WqJVgPNmaInIcaoukieKIt87rcR3HDekN4go0xug0xtQVm5A
abkBV4p0yKroW59dqEN2kU7qY59V5WtTsGALUQTK9GLFiqEGi/tcyC7GwZRcLE04iSk3tca9PVvV2OJy54kM6MoNiArwlBahcSZBEPDwTa3x6JcHsDbxPKb2b+Ow9puOcCglF5PX7kdmgRaB3mp8/EA8ukU0s/k4Ec09MbZ5K8cPsAm7Pro5ogO9cC6rCJv+TcM9PRtm/YYzmYV48ptDOJCcC8D4AfW1O7oi1E8DwBjsB3irEeCtRix8G2RMZLub2gdBEIx/QxUyAY82UCcoIqq/1kFekAlAfmk5Mgq0CPHVuHpIbsHlgcXy5cvx5ptvIi0tDZ06dcLSpUvRr1+/GvfftWsXZs+ejSNHjiA8PBxPP/00pk6darbPhg0bMHfuXJw5cwZt2rTBK6+8gttvv93ZL8VhPvnzHD7bewFlegPKyqsEEXoDdOWGeufKqxUyBHqrEeSjlv4N8laZfR/orUYzT6UxqDAYoDeIKNeLKDeI0BsMKL/q+/3nr+DjP87hUl4pFvx0FO9tP40H+0RhQu+oau0NTattD+0c2mBXAIZ1CkVLfw9cvFKCb5NSML53VIM8b11+/S8NM9cfRGmZAR1DffDxA/Fo6c8rmo2FIAi4O74l3vj1BNbvT3F6YKE3iPjkz3N487cT0JYb4KNWYO6tsbg7viWvlrmhQG81urZshkMpubg7PsKp9WRE5FgapRxRAV44W1HAzcDCOi4NLNavX4+ZM2di+fLl6Nu3Lz788EMMHz4cR48eRatW1a+8njt3DiNGjMCUKVPw+eef488//8S0adMQFBSEO++8EwCQmJiIsWPH4qWXXsLtt9+O77//Hvfccw/++OMP9OrVq6Ffol1yi8twNrPI6v2VFalIAd5qBHipEOCtQnMvFQK91WjupZK2BXip0dxbBR+1wuEfUuIim+OBPlH4NukiPvz9DFJySvBWwkl8+PtZjLuhFR66MRrBPhpoy/XYUVFD0BBpUCYKuQyTb4zG/J+O4uM/zuH+XpEurQ0QRRErdp3F678eB2C8Iv3efd3ho2GP+cbmrh4t8daWk0i6cAWnMwrQ1knrQpzPKsJT3x7CvvPGNVf6tQvEa3d2RYtm7rfIIFV68dYYbPgnFU8O6eDqoRCRjdqFeFcEFoXo147ND6whiC5cUrBXr17o0aMHPvjgA2lbTEwMxowZg0WLFlXb/5lnnsHGjRtx7NgxadvUqVNx6NAhJCYmAgDGjh2L/Px8/PLLL9I+w4YNg7+/P9atW2fVuPLz8+Hn54e8vDz4+jZ8mkFKTjEu5ZZAqZBBJZdBpZBBKZdBKRegkld8rTB+r5TJIGtkxbPlegM2HU7D8h1ncKKil75KIcPdcS3RMcwXc3/4D8E+auydc0uDjr1YV44+r21HbnEZlo/rUWcNgy1E0ZgqVlJmatlajkKtsV1rYUVL18KK9q5FOj2Op+Vjy1HjzM3EPlF4YWQMFLW0DiXXmvzpfmw9dhlT+kXj+ZGxDj22wSDi08TzeP3X4ygtM8BLJcfzI2Nx3/URnKUgInKht7acwHvbT+PenhF47c6urh6Oy9jyudhlMxY6nQ5JSUl49tlnzbYPGTIEe/bssfiYxMREDBkyxGzb0KFDsWrVKpSVlUGpVCIxMRGzZs2qts/SpUsdOn5nimju6dZT5gq5DKO7tcCoruHYfjwDy3eexj/Jufjir2RpnyGdQho8IPJUKTD+hki8t/00Fv50FOv3pcAgihBFwCCKFTdjkGCQtgF6Q0VKmsEgpaeVG4xpaWV649fG+hPbyARg3qhOeKBPlONfLDnU2J4R2HrsMr5JumhWp1T1c78AwWyb3iBCW24w3sr0FV9X/FtW+XWRthxXissAAH3aBOCNu7oyHY6I6lRSUoLVq1ebbZs0aRI8PDjL6Simdt0HU3Kx92y2S8firVagcws/l47BGi4LLLKysqDX6xESYr7qckhICNLT0y0+Jj093eL+5eXlyMrKQlhYWI371HRMANBqtdBq
K5dsz8/Pt/XlkAUymYBBsSG4JSYYf53LwfKdZ/D7SWNrvFu7hrtkTA/0icLHu88hPb8U6fmlTnkOhUyAl1oBb3Vlu1avivat0tdqBQbHhJitikuN18AOQQjxVeNyvtYsQHYUT5Ucc0bEYNz1rRrdDCQRNU6FhYV49NFHzbbdc889DCwcyNRg5nh6Ae5dudelY+kW0Qw/TO/r0jFYw+XF21dP9dfV0svS/ldvt/WYixYtwoIFC6weM9lGEASpLerRS/nILdbhhtauWQE20FuN76b1weHUPMgEATIBkAkChIp/TduEKv8qZIKUiqZUyKCUyaBUGLep5MZ2u0q5cbtGJYNa0Xg6TpFjKOQyrHqgJ7Yfz4AoAiIqZ6iqJpNWnbeSCYBaIYdaIYNaKav8WiGDWlnla4UcLfw9alw1nYiIXKN9sA/u6N4C/6bmuXooaOnvHgGjywKLwMBAyOXyajMJGRkZ1WYcTEJDQy3ur1AoEBAQUOs+NR0TAObMmYPZs2dL3+fn5yMiomHaSjY1seGub40ZE+aLmDDXj4PcS+cWfm4xDU1ERI4hkwlYMrabq4fhVlxWLapSqRAXF4eEhASz7QkJCejTp4/Fx/Tu3bva/lu2bEF8fDyUSmWt+9R0TABQq9Xw9fU1uxERERERkfVcmgo1e/ZsjB8/HvHx8ejduzdWrlyJ5ORkaV2KOXPmIDU1FWvXrgVg7AC1bNkyzJ49G1OmTEFiYiJWrVpl1u1pxowZuOmmm/D6669j9OjR+PHHH7F161b88ccfLnmNRERERERNgUsDi7FjxyI7OxsLFy5EWloaOnfujM2bNyMyMhIAkJaWhuTkykLJ6OhobN68GbNmzcL777+P8PBwvPvuu9IaFgDQp08ffPXVV3jhhRcwd+5ctGnTBuvXr3ebNSyIiIiIiNyRS9exaKxcvY4FERERUW0yMzMRHBxsti0jIwNBQVzIjRzLls/FXJGLiIiIiIjqjYEFERERERHVGwMLIiIiIiKqNwYWRERERERUbwwsiIiIiIio3hhYEBERERFRvTGwICIiIiKienPpAnmNlWlpj/z8fBePhIiIiKi6goICi9vUarULRkPXMtPnYWuWvmNgYYHpP2tERISLR0JERERknTZt2rh6CHQNKygogJ+fX637cOVtCwwGAy5dugQfHx8IglDrvvn5+YiIiEBKSgpX6XYTPGfuh+fMPfG8uR+eM/fDc+ae3Om8iaKIgoIChIeHQyarvYqCMxYWyGQytGzZ0qbH+Pr6NvpfDDLHc+Z+eM7cE8+b++E5cz88Z+7JXc5bXTMVJizeJiIiIiKiemNgQURERERE9cbAop7UajXmzZvHLgxuhOfM/fCcuSeeN/fDc+Z+eM7c07V63li8TURERERE9cYZCyIiIiIiqjcGFkREREREVG8MLIiIiIiIqN4YWNgpKioKgiCY3Z599lmzfZKTkzFq1Ch4eXkhMDAQjz/+OHQ6nYtGTFVptVp069YNgiDg4MGDZvfxvDUut912G1q1agWNRoOwsDCMHz8ely5dMtuH56xxOX/+PB566CFER0fDw8MDbdq0wbx586qdE563xuWVV15Bnz594OnpiWbNmlnch+es8Vm+fDmio6Oh0WgQFxeH3bt3u3pIVOH333/HqFGjEB4eDkEQ8MMPP5jdL4oi5s+fj/DwcHh4eGDAgAE4cuSIawbrIAws6mHhwoVIS0uTbi+88IJ0n16vx8iRI1FUVIQ//vgDX331FTZs2IAnnnjChSMmk6effhrh4eHVtvO8NT4DBw7E119/jRMnTmDDhg04c+YM7rrrLul+nrPG5/jx4zAYDPjwww9x5MgRvP3221ixYgWee+45aR+et8ZHp9Ph7rvvxiOPPGLxfp6zxmf9+vWYOXMmnn/+eRw4cAD9+vXD8OHDkZyc7OqhEYCioiJcd911WLZsmcX733jjDSxZsgTLli3Dvn37EBoaisGDB6OgoKCBR+pAItklMjJSfPvtt2u8
f/PmzaJMJhNTU1OlbevWrRPVarWYl5fXACOkmmzevFns2LGjeOTIERGAeODAAbP7eN4atx9//FEUBEHU6XSiKPKcuYs33nhDjI6Olr7neWu8PvnkE9HPz6/adp6zxuf6668Xp06daratY8eO4rPPPuuiEVFNAIjff/+99L3BYBBDQ0PF1157TdpWWloq+vn5iStWrHDBCB2DMxb18PrrryMgIADdunXDK6+8YjYdnJiYiM6dO5tdFR86dCi0Wi2SkpJcMVwCcPnyZUyZMgWfffYZPD09q93P89a45eTk4IsvvkCfPn2gVCoB8Jy5i7y8PDRv3lz6nufN/fCcNS46nQ5JSUkYMmSI2fYhQ4Zgz549LhoVWevcuXNIT083O39qtRr9+/d36/PHwMJOM2bMwFdffYUdO3bg0UcfxdKlSzFt2jTp/vT0dISEhJg9xt/fHyqVCunp6Q09XIIxl3HixImYOnUq4uPjLe7D89Y4PfPMM/Dy8kJAQACSk5Px448/SvfxnDV+Z86cwXvvvYepU6dK23je3A/PWeOSlZUFvV5f7ZyEhITwfLgB0zm61s4fA4sq5s+fX60g++rb/v37AQCzZs1C//790bVrV0yePBkrVqzAqlWrkJ2dLR1PEIRqzyGKosXtZD9rz9t7772H/Px8zJkzp9bj8bw5ny3/1wDgqaeewoEDB7BlyxbI5XJMmDABYpW1PXnOGoat5w0ALl26hGHDhuHuu+/G5MmTze7jeXM+e85ZbXjOGp+rf/Y8H+7lWjt/ClcPoDF59NFHce+999a6T1RUlMXtN9xwAwDg9OnTCAgIQGhoKP766y+zfa5cuYKysrJq0SnVj7Xn7eWXX8bevXuhVqvN7ouPj8e4cePw6aef8rw1EFv/rwUGBiIwMBDt27dHTEwMIiIisHfvXvTu3ZvnrAHZet4uXbqEgQMHonfv3li5cqXZfjxvDaM+72tX4zlrXAIDAyGXy6td3c7IyOD5cAOhoaEAjDMXYWFh0nZ3P38MLKowfXixx4EDBwBA+uXo3bs3XnnlFaSlpUnbtmzZArVajbi4OMcMmABYf97effddvPzyy9L3ly5dwtChQ7F+/Xr06tULAM9bQ6nP/zXTTIVWqwXAc9aQbDlvqampGDhwIOLi4vDJJ59AJjOfIOd5axj1+b92NZ6zxkWlUiEuLg4JCQm4/fbbpe0JCQkYPXq0C0dG1oiOjsb/t3fHoE2tYRjHn0NuU00DqRVpjIqBagOlg3XpUsXqIggdaqGI0OrmUEGwuBYdxC5iFXGSVByMS12kCIppFiFIe8CgiGCbSEQsImKo2Gr73sF7Q4Mdrp6kycX/DzLk8PHxHl4OnIdzvu+Ew2E9fPhQHR0dkn6sm0mlUhodHa1ydR5Ua9X4/9mTJ0/s8uXL5rquzc7O2t27dy0SiVhPT09xzPfv3629vd0OHTpkMzMz9ujRI9u+fbsNDQ1VsXKsNjc399OuUPSttqTTabt27Zq5rmvZbNYeP35sXV1d1tLSYl+/fjUzelaL3r59a7t27bKDBw9aPp+3d+/eFX//om+1J5fLmeu6dv78eQsGg+a6rrmua4VCwczoWS1KJBJWV1dnN2/etBcvXtiZM2esoaHBstlstUuDmRUKheJ1JKl475jL5czM7NKlSxYKhWxiYsIymYwdO3bMtm7dap8/f65y5b+PYPEbpqenrbOz00KhkG3YsMFisZiNjIzYwsJCybhcLmdHjhyxjRs3WlNTkw0NDRVvhlB9awULM/pWS549e2bd3d3W1NRk9fX1Fo1G7dSpU5bP50vG0bPaEo/HTdKav9XoW20ZHBxcs2fJZLI4hp7VnuvXr9vOnTvN7/fb3r17LZVKVbsk/COZTK55TQ0ODprZjy1nR0ZGLBwOW319ve3fv98ymUx1i/bIMVu1AhIAAAAAfgO7QgEAAADwjGABAAAAwDOCBQAAAADPCBYAAAAAPCNYAAAAAPCMYAEAAADAM4IFAAAAAM8I
FgAAAAA8I1gAANbd1NSUHMfRp0+fql0KAKBM+PI2AKDiDhw4oD179ujKlSuSpKWlJX38+FHNzc1yHKe6xQEAyuKvahcAAPjz+P1+hcPhapcBACgjXoUCAFTUiRMnlEqlNDY2Jsdx5DiOxsfHS16FGh8fV2Njo+7fv69YLKZAIKC+vj4tLCzo1q1bikaj2rRpk06fPq3l5eXi3EtLSzp37py2bdumhoYGdXZ2ampqqjonCgB/OJ5YAAAqamxsTK9evVJ7e7suXLggSXr+/PlP4758+aKrV68qkUioUCiot7dXvb29amxs1OTkpGZnZ3X06FF1dXWpv79fknTy5Ells1klEglFIhHdu3dPhw8fViaT0e7du9f1PAHgT0ewAABUVCgUkt/vVyAQKL7+9PLly5/Gffv2TTdu3FBLS4skqa+vT7dv39b79+8VDAbV1tam7u5uJZNJ9ff36/Xr17pz547y+bwikYgkaXh4WA8ePFA8HtfFixfX7yQBAAQLAEBtCAQCxVAhSc3NzYpGowoGgyXH5ufnJUkzMzMyM7W2tpbMs7i4qM2bN69P0QCAIoIFAKAm1NXVlfx3HGfNYysrK5KklZUV+Xw+TU9Py+fzlYxbHUYAAOuDYAEAqDi/31+y6LocOjo6tLy8rPn5ee3bt6+scwMAfh27QgEAKi4ajSqdTiubzerDhw/Fpw5etLa26vjx4xoYGNDExITm5ub09OlTjY6OanJysgxVAwB+BcECAFBxw8PD8vl8amtr05YtW/TmzZuyzBuPxzUwMKCzZ88qFoupp6dH6XRaO3bsKMv8AID/ji9vAwAAAPCMJxYAAAAAPCNYAAAAAPCMYAEAAADAM4IFAAAAAM8IFgAAAAA8I1gAAAAA8IxgAQAAAMAzggUAAAAAzwgWAAAAADwjWAAAAADwjGABAAAAwDOCBQAAAADP/gYGmPSmRPtnKQAAAABJRU5ErkJggg==", - "text/plain": [ - "
" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], + "outputs": [], "source": [ "#| eval: false\n", "plot_attention(nf.models[0], plot=8)" @@ -1941,18 +1303,7 @@ "cell_type": "code", "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "dict_keys(['Past variable importance over time', 'Future variable importance over time', 'Static covariates'])" - ] - }, - "execution_count": null, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "#| eval: false\n", "\n", @@ -1971,28 +1322,7 @@ "cell_type": "code", "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": null, - "metadata": {}, - "output_type": "execute_result" - }, - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAkEAAAGdCAYAAAAVEKdkAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8fJSN1AAAACXBIWXMAAA9hAAAPYQGoP6dpAAAauUlEQVR4nO3de3BU9Rnw8SchIRAwBFEgQIaLchMERQZFtKKIzogoTlttqSJWRx3HCli1OFgR2g5VWy22XqkF25HiW7zVSlVK0QJeqgiOFaodBMERCqJchCpizvuHL3mNYEpSyJL8Pp+ZnTG7Z3ef/IicL2fPZvOyLMsCACAx+bkeAAAgF0QQAJAkEQQAJEkEAQBJEkEAQJJEEACQJBEEACRJBAEASSrI9QAHsoqKinjvvffioIMOiry8vFyPAwDshSzLYuvWrdGuXbvIz//q4z0iqBrvvfdelJeX53oMAKAW1qxZEx06dPjK20VQNQ466KCI+HwRS0pKcjwNALA3tmzZEuXl5ZX78a8igqqx6yWwkpISEQQA9cx/O5XFidEAQJJEEACQJBEEACTJOUEA1EtZlsXOnTvjs88+y/Uo1LFGjRpFQUHB//zra0QQAPXOjh07Yu3atbF9+/Zcj0KOFBcXR1lZWTRu3LjWjyGCAKhXKioqYuXKldGoUaNo165dNG7c2C+0TUiWZbFjx47YsGFDrFy5Mrp27VrtL0SsjggCoF7ZsWNHVFRURHl5eRQXF+d6HHKgadOmUVhYGO+8807s2LEjmjRpUqvHcWI0APVSbf/1T8OwL/78/QQBAEkSQQBQRwYPHhxjx47N9Rj8P84JAqDB6DT+yTp9vlU/HVaj7R955JEoLCzcT9P8b5599tk4+eST48MPP4zS0tJcj1MnRBAA1JGDDz441yPs0aeffprrEXLCy2EAUEe++HJYp06d4sc//nGMGjUqmjdvHh07dozHH388NmzYEGeffXY0b948jjzyyHjllVcq7z9jxowoLS2Nxx57LLp16xZNmjSJoUOHxpo1a6o8z9133x2HHXZYNG7cOLp37x6/+93vqtyel5cX99xzT5x99tnRrFmzuOSSS+Lkk0+OiIiWLVtGXl5ejB49OiIinnrqqTjhhBOitLQ0WrVqFWeeeWasWLGi8rFWrVoVeXl58cgjj8TJJ58cx
cXF0bdv33jhhReqPOeiRYvipJNOiuLi4mjZsmWcfvrp8eGHH0bE5297v+WWW6JLly7RtGnT6Nu3b8yePXufrHl1RBAA5Mjtt98egwYNiiVLlsSwYcPiggsuiFGjRsX5558fr776ahx++OExatSoyLKs8j7bt2+Pn/zkJ/HAAw/EokWLYsuWLfGtb32r8vZHH300xowZE9///vfjH//4R1x22WVx0UUXxfz586s898SJE+Pss8+O119/PSZPnhwPP/xwRES8+eabsXbt2pg6dWpERGzbti2uvvrqePnll2PevHmRn58f55xzTlRUVFR5vAkTJsQ111wTS5cujW7dusW3v/3t2LlzZ0RELF26NIYMGRK9evWKF154IRYuXBjDhw+v/G3fN9xwQ0yfPj3uvvvueOONN2LcuHFx/vnnx3PPPbfvF/0L8rIvrixVbNmyJVq0aBGbN2+OkpKSXI8DQER8/PHHsXLlyujcufNuvx/mQD8naPDgwXHUUUfFL37xi+jUqVOceOKJlUdp1q1bF2VlZfHDH/4wJk+eHBERL774YgwcODDWrl0bbdu2jRkzZsRFF10UL774Yhx77LEREfHPf/4zevbsGS+99FIMGDAgBg0aFL169Yr77ruv8nnPPffc2LZtWzz55Ofrk5eXF2PHjo3bb7+9cpu9PSdow4YN0bp163j99dejd+/esWrVqujcuXP8+te/josvvjgiIpYtWxa9evWK5cuXR48ePWLkyJGxevXqWLhw4W6Pt23btjjkkEPir3/9awwcOLDy+ksuuSS2b98eM2fO3OMc1f0c7O3+25EgAMiRPn36VP53mzZtIiLiyCOP3O269evXV15XUFAQ/fv3r/y6R48eUVpaGsuXL4+IiOXLl8egQYOqPM+gQYMqb9/li49RnRUrVsTIkSOjS5cuUVJSEp07d46IiNWrV3/l91JWVlZl7l1HgvZk2bJl8fHHH8fQoUOjefPmlZff/va3VV522x+cGA0AOfLFd4rt+uiPPV335Zee9vQxIV+87su3Z1m223XNmjXbqxmHDx8e5eXlMW3atGjXrl1UVFRE7969Y8eOHf/1e9k1d9OmTb/y8Xdt8+STT0b79u2r3FZUVLRXM9aWI0EAUI/s3LmzysnSb775ZmzatCl69OgRERE9e/bc7WWn559/Pnr27Fnt4+76INJd5+lERGzcuDGWL18eN9xwQwwZMiR69uxZeTJzTfTp0yfmzZu3x9uOOOKIKCoqitWrV8fhhx9e5VJeXl7j56oJR4IAoB4pLCyM733ve3HHHXdEYWFhXHnllXHcccfFgAEDIiLi2muvjXPPPTf69esXQ4YMiSeeeCIeeeSR+Mtf/lLt43bs2DHy8vLiT3/6U5xxxhnRtGnTaNmyZbRq1Sruu+++KCsri9WrV8f48eNrPPP1118fRx55ZFxxxRVx+eWXR+PGjWP+/PnxzW9+Mw455JC45pprYty4cVFRUREnnHBCbNmyJZ5//vlo3rx5XHjhhbVap73hSBAA1CPFxcXxgx/8IEaOHBkDBw6Mpk2bxqxZsypvHzFiREydOjVuvfXW6NWrV9x7770xffr0GDx4cLWP2759+5g0aVKMHz8+2rRpE1deeWXk5+fHrFmzYvHixdG7d+8YN25c3HrrrTWeuVu3bvHMM8/Ea6+9FgMGDIiBAwfG448/HgUFnx+L+dGPfhQ33nhjTJkyJXr27Bmnn356PPHEE5XnH+0v3h1WDe8OAzjwVPeuoIZuxowZMXbs2Ni0aVOuR8k57w4DAKglEQQAJEkEAUA9MXr0aC+F7UMiCABIkggCAJIkggCol7y5OW374s9fBAFQr+z6eIbt27fneBJyadef/xc/rqOm/MZoAOqVRo0aRWlpaeWHcxYXF+/xs7RomLIsi+3bt8f69eujtLQ0GjVqVOvHEkEA1Dtt27aNiKqfrk5aSktLK38OaksEAVDv5OXlRVlZWbRu3To+/fTTXI9DHSssLPyfj
gDtIoIAqLcaNWq0T3aGpMmJ0QBAkkQQAJAkEQQAJEkEAQBJEkEAQJJEEACQJBEEACRJBAEASRJBAECSRBAAkCQRBAAkSQQBAEkSQQBAkkQQAJAkEQQAJEkEAQBJEkEAQJJEEACQJBEEACRJBAEASRJBAECSRBAAkCQRBAAkSQQBAEkSQQBAkkQQAJAkEQQAJEkEAQBJEkEAQJJEEACQJBEEACRJBAEASRJBAECSRBAAkCQRBAAkSQQBAEkSQQBAkkQQAJAkEQQAJEkEAQBJEkEAQJJEEACQJBEEACRJBAEASRJBAECSRBAAkCQRBAAkSQQBAEkSQQBAkkQQAJAkEQQAJEkEAQBJEkEAQJJEEACQJBEEACRJBAEASRJBAECSCnI9QH3Qe+LTkV9UnOsxAKDBWPXTYbkewZEgACBNIggASJIIAgCSJIIAgCSJIAAgSSIIAEiSCAIAkiSCAIAkiSAAIEkiCABIkggCAJIkggCAJIkgACBJIggASJIIAgCSJIIAgCSJIAAgSSIIAEiSCAIAkiSCAIAkiSAAIEkiCABIkggCAJIkggCAJIkgACBJIggASJIIAgCSJIIAgCSJIAAgSSIIAEiSCAIAkiSCAIAkiSAAIEkiCABIkggCAJIkggCAJIkgACBJIggASJIIAgCSJIIAgCSJIAAgSSIIAEiSCAIAkiSCAIAkiSAAIEkiCABIkggCAJIkggCAJIkgACBJIggASJIIAgCSJIIAgCSJIAAgSSIIAEiSCAIAkiSCAIAkiSAAIEkiCABIkggCAJIkggCAJIkgACBJIggASJIIAgCSJIIAgCSJIAAgSSIIAEiSCAIAkiSCAIAkiSAAIEkiCABIkggCAJIkggCAJIkgACBJIggASJIIAgCSJIIAgCSJIAAgSSIIAEiSCAIAkiSCAIAkiSAAIEkiCABIkggCAJIkggCAJIkgACBJIggASNI+jaBVq1ZFXl5eLF26tNrtbrrppjjqqKMqvx49enSMGDFiX44CAFCtfRpB5eXlsXbt2ujdu3eN7jd16tSYMWPGvhwl1q5dGyNHjozu3btHfn5+jB07dp8+PgBQv+3TCGrUqFG0bds2CgoK9nh7lmWxc+fO3a5v0aJFlJaW7stR4pNPPolDDz00JkyYEH379t2njw0A1H81jqCnnnoqTjjhhCgtLY1WrVrFmWeeGStWrIiI3V8Oe/bZZyMvLy+efvrp6N+/fxQVFcWCBQt2e8wvvxw2ePDguOqqq+K6666Lgw8+ONq2bRs33XRTlfts3rw5Lr300mjdunWUlJTEKaecEq+99lrl7Z06dYqpU6fGqFGjokWLFjX9NgGABq7GEbRt27a4+uqr4+WXX4558+ZFfn5+nHPOOVFRUfGV97nuuutiypQpsXz58ujTp89ePc8DDzwQzZo1i5deeiluueWWmDx5csydOzciPj+iNGzYsFi3bl3MmTMnFi9eHP369YshQ4bEBx98UNNvqdInn3wSW7ZsqXIBABqmPb9uVY2vf/3rVb6+//77o3Xr1rFs2bJo3rz5Hu8zefLkGDp0aI2ep0+fPjFx4sSIiOjatWv86le/innz5sXQoUNj/vz58frrr8f69eujqKgoIiJ+9rOfxWOPPRazZ8+OSy+9tKbfVkRETJkyJSZNmlSr+wIA9UuNjwStWLEiRo4cGV26dImSkpLo3LlzRESsXr36K+/Tv3//Gg/25SNGZWVlsX79+oiIWLx4cXz00UfRqlWraN68eeVl5cqVlS/N1cb1118fmzdvrrysWbOm1o8FABzYanwkaPjw4VFeXh7Tpk2Ldu3aRUVFRfTu3Tt27Njxlfdp1qxZjQcrLCys8nVeXl7lS24VFRVRVlYWzz777G73+19OsC4qKqo8sgQANGw1iqCNGzfG8uXL4957740TTzwxIiIWLly4XwarTr9+/WLdunVRUFAQnTp1qvPnBwDqvxpFUMuWLaNVq1Zx3333RVlZWaxevTrGjx+/v2b7S
qeeemoMHDgwRowYETfffHN079493nvvvZgzZ06MGDGi8uW3Xe9S++ijj2LDhg2xdOnSaNy4cRxxxBF1PjMAcGCpUQTl5+fHrFmz4qqrrorevXtH9+7d44477ojBgwfvp/H2LC8vL+bMmRMTJkyI7373u7Fhw4Zo27ZtfO1rX4s2bdpUbnf00UdX/vfixYtj5syZ0bFjx1i1alWdzgsAHHjysizLcj3EgWrLli3RokWLKB/7fyK/qDjX4wBAg7Hqp8P222Pv2n9v3rw5SkpKvnI7H6AKACRJBAEASRJBAECSRBAAkCQRBAAkSQQBAEkSQQBAkkQQAJAkEQQAJEkEAQBJEkEAQJJEEACQJBEEACRJBAEASRJBAECSRBAAkCQRBAAkSQQBAEkSQQBAkkQQAJAkEQQAJEkEAQBJEkEAQJJEEACQJBEEACRJBAEASRJBAECSRBAAkCQRBAAkSQQBAEkSQQBAkkQQAJAkEQQAJEkEAQBJEkEAQJJEEACQJBEEACRJBAEASRJBAECSRBAAkCQRBAAkSQQBAEkSQQBAkkQQAJAkEQQAJEkEAQBJEkEAQJJEEACQJBEEACRJBAEASRJBAECSRBAAkCQRBAAkSQQBAEkSQQBAkkQQAJAkEQQAJEkEAQBJEkEAQJJEEACQJBEEACRJBAEASRJBAECSRBAAkCQRBAAkSQQBAEkSQQBAkkQQAJAkEQQAJEkEAQBJEkEAQJJEEACQJBEEACRJBAEASRJBAECSRBAAkCQRBAAkSQQBAEkSQQBAkkQQAJAkEQQAJEkEAQBJEkEAQJJEEACQJBEEACRJBAEASSrI9QD1wT8mnR4lJSW5HgMA2IccCQIAkiSCAIAkiSAAIEkiCABIkggCAJIkggCAJIkgACBJIggASJIIAgCSJIIAgCSJIAAgSSIIAEiSCAIAkiSCAIAkiSAAIEkiCABIkggCAJIkggCAJIkgACBJIggASJIIAgCSJIIAgCSJIAAgSSIIAEiSCAIAkiSCAIAkiSAAIEkiCABIkggCAJIkggCAJIkgACBJIggASJIIAgCSJIIAgCSJIAAgSSIIAEiSCAIAkiSCAIAkiSAAIEkiCABIkggCAJIkggCAJIkgACBJIggASJIIAgCSJIIAgCSJIAAgSSIIAEiSCAIAkiSCAIAkiSAAIEkiCABIkggCAJIkggCAJIkgACBJIggASJIIAgCSJIIAgCSJIAAgSSIIAEiSCAIAkiSCAIAkiSAAIEkiCABIkggCAJIkggCAJIkgACBJIggASJIIAgCSJIIAgCSJIAAgSSIIAEiSCAIAkiSCAIAkiSAAIEkiCABIkggCAJIkggCAJIkgACBJIggASJIIAgCSJIIAgCSJIAAgSSIIAEiSCAIAkiSCAIAkiSAAIEkiCABIkggCAJIkggCAJBXkeoADWZZlERGxZcuWHE8CAOytXfvtXfvxryKCqrFx48aIiCgvL8/xJABATW3dujVatGjxlbeLoGocfPDBERGxevXqaheRfWvLli1RXl4ea9asiZKSklyPkxRrnxvWPXesfW7s73XPsiy2bt0a7dq1q3Y7EVSN/PzPT5lq0aKF/zlyoKSkxLrniLXPDeueO9Y+N/bnuu/NwQsnRgMASRJBAECSRFA1ioqKYuLEiVFUVJTrUZJi3XPH2ueGdc8da58bB8q652X/7f1jAAANkCNBAECSRBAAkCQRBAAkSQQBAElKPoLuuuuu6Ny5czRp0iSOOeaYWLBgQbXbP/fcc3HMMcdEkyZNokuXLnHPPffU0aQNS03W/ZFHHomhQ4fGoYceGiUlJTFw4MB4+umn63DahqWmP/O7LFq0KAoKCuKoo47avwM2UDVd908++SQmTJgQHTt2jKKiojjssMPiN7/5TR1N27DUdO0ffPDB6Nu3bxQXF0dZWVlcdNFFlR+jxN7529/+FsOHD4927dpFXl5ePPbYY//1PjnZv2YJmzVrVlZYWJhNmzYtW7ZsWTZmzJisW
bNm2TvvvLPH7d9+++2suLg4GzNmTLZs2bJs2rRpWWFhYTZ79uw6nrx+q+m6jxkzJrv55puzv//979lbb72VXX/99VlhYWH26quv1vHk9V9N136XTZs2ZV26dMlOO+20rG/fvnUzbANSm3U/66yzsmOPPTabO3dutnLlyuyll17KFi1aVIdTNww1XfsFCxZk+fn52dSpU7O33347W7BgQdarV69sxIgRdTx5/TZnzpxswoQJ2cMPP5xFRPboo49Wu32u9q9JR9CAAQOyyy+/vMp1PXr0yMaPH7/H7a+77rqsR48eVa677LLLsuOOO26/zdgQ1XTd9+SII47IJk2atK9Ha/Bqu/bnnXdedsMNN2QTJ04UQbVQ03X/85//nLVo0SLbuHFjXYzXoNV07W+99dasS5cuVa674447sg4dOuy3GRu6vYmgXO1fk305bMeOHbF48eI47bTTqlx/2mmnxfPPP7/H+7zwwgu7bX/66afHK6+8Ep9++ul+m7Uhqc26f1lFRUVs3bq18gNu2Tu1Xfvp06fHihUrYuLEift7xAapNuv+xz/+Mfr37x+33HJLtG/fPrp16xbXXHNN/Oc//6mLkRuM2qz98ccfH++++27MmTMnsiyLf//73zF79uwYNmxYXYycrFztX5P9ANX3338/Pvvss2jTpk2V69u0aRPr1q3b433WrVu3x+137twZ77//fpSVle23eRuK2qz7l/385z+Pbdu2xbnnnrs/RmywarP2//rXv2L8+PGxYMGCKChI9q+L/0lt1v3tt9+OhQsXRpMmTeLRRx+N999/P6644or44IMPnBdUA7VZ++OPPz4efPDBOO+88+Ljjz+OnTt3xllnnRW//OUv62LkZOVq/5rskaBd8vLyqnydZdlu1/237fd0PdWr6brv8vvf/z5uuummeOihh6J169b7a7wGbW/X/rPPPouRI0fGpEmTolu3bnU1XoNVk5/5ioqKyMvLiwcffDAGDBgQZ5xxRtx2220xY8YMR4NqoSZrv2zZsrjqqqvixhtvjMWLF8dTTz0VK1eujMsvv7wuRk1aLvavyf7T7pBDDolGjRrt9q+B9evX71aju7Rt23aP2xcUFESrVq3226wNSW3WfZeHHnooLr744vjDH/4Qp5566v4cs0Gq6dpv3bo1XnnllViyZElceeWVEfH5zjnLsigoKIhnnnkmTjnllDqZvT6rzc98WVlZtG/fPlq0aFF5Xc+ePSPLsnj33Xeja9eu+3XmhqI2az9lypQYNGhQXHvttRER0adPn2jWrFmceOKJ8eMf/9gR//0kV/vXZI8ENW7cOI455piYO3dulevnzp0bxx9//B7vM3DgwN22f+aZZ6J///5RWFi432ZtSGqz7hGfHwEaPXp0zJw502vztVTTtS8pKYnXX389li5dWnm5/PLLo3v37rF06dI49thj62r0eq02P/ODBg2K9957Lz766KPK6956663Iz8+PDh067Nd5G5LarP327dsjP7/qrrFRo0YR8f+PTLDv5Wz/ul9Puz7A7Xrr5P33358tW7YsGzt2bNasWbNs1apVWZZl2fjx47MLLrigcvtdb+EbN25ctmzZsuz+++/3FvlaqOm6z5w5MysoKMjuvPPObO3atZWXTZs25epbqLdquvZf5t1htVPTdd+6dWvWoUOH7Bvf+Eb2xhtvZM8991zWtWvX7JJLLsnVt1Bv1XTtp0+fnhUUFGR33XVXtmLFimzhwoVZ//79swEDBuTqW6iXtm7dmi1ZsiRbsmRJFhHZbbfdli1ZsqTyVxMcKPvXpCMoy7LszjvvzDp27Jg1btw469evX/bcc89V3nbhhRdmJ510UpXtn3322ezoo4/OGjdunHXq1Cm7++6763jihqEm637SSSdlEbHb5cILL6z7wRuAmv7Mf5EIqr2arvvy5cuzU089NWvatGnWoUOH7Oqrr862b99ex1M3DDVd+zvuuCM74ogjsqZNm2ZlZWXZd77znezdd
9+t46nrt/nz51f79/aBsn/NyzLH9wCA9CR7ThAAkDYRBAAkSQQBAEkSQQBAkkQQAJAkEQQAJEkEAQBJEkEAQJJEEACQJBEEACRJBAEASRJBAECS/i/uyuTvT8h/KgAAAABJRU5ErkJggg==", - "text/plain": [ - "
" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], + "outputs": [], "source": [ "#| eval: false\n", "feature_importances['Static covariates'].sort_values(by='importance').plot(kind='barh')" @@ -2009,28 +1339,7 @@ "cell_type": "code", "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": null, - "metadata": {}, - "output_type": "execute_result" - }, - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAn8AAAGdCAYAAABjKhXvAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8fJSN1AAAACXBIWXMAAA9hAAAPYQGoP6dpAAAmnElEQVR4nO3de3BU5eH/8c+GJBty2ShyCUIgRCTcJAQCyqUDbYGo1GrLoEi4X1osAgGRwogEEE2gIl4qkVKbQAt4QxSVAhElRSgiEAoSEL8IhmosQ9UEQgmQnN8f/jjjkiAJ7mY3+7xfMzuTPefZs8+Tpx3enmTBYVmWJQAAABghyNcTAAAAQO0h/gAAAAxC/AEAABiE+AMAADAI8QcAAGAQ4g8AAMAgxB8AAIBBiD8AAACDBPt6AvA/FRUV+vLLLxUVFSWHw+Hr6QAAgGqwLEunT5/WjTfeqKCgK9/fI/5QyZdffqnY2FhfTwMAAFyDEydOqHnz5lc8T/yhkqioKEnf/Y/H5XL5eDYAAKA6SkpKFBsba/85fiXEHyq59KNel8tF/AEAUMdc7Ve2+MAHAACAQYg/AAAAgxB/AAAABiH+AAAADEL8AQAAGIT4AwAAMAjxBwAAYBDiDwAAwCDEHwAAgEGIPwAAAIMQfwAAAAYh/gAAAAxC/AEAABiE+AMAADAI8QcAAGAQ4g8AAMAgxB8AAIBBiD8AAACDEH8AAAAGIf4AAAAMEuzrCcB/dUzfpCBnuK+nAQBAwDieOdDXU+DOHwAAgEmIPwAAAIMQfwAAAAYh/gAAAAxC/AEAABiE+AMAADAI8QcAAGAQ4g8AAMAgxB8AAIBBiD8AAACDEH8AAAAGIf4AAAAMQvwBAAAYhPgDAAAwCPEHAABgEOIPAADAIMQfAACAQYi/ADJ37lx17tzZ19MAAAB+jPiroxwOh9544w1fTwMAANQxxB8AAIBBiL8fqW/fvpo0aZLS0tJ0/fXXq0mTJvrTn/6k0tJSjR49WlFRUbrpppv097//3X5NXl6eunfvLqfTqaZNm2rmzJm6ePGi2zUnT56sGTNmqEGDBoqJidHcuXPt83FxcZKkX/3qV3I4HPbzS/76178qLi5O0dHRGjJkiE6fPu3NbwEAAKhDiD8PWLFihRo2bKhdu3Zp0qRJeuCBBzR48GD17NlTe/fuVUpKioYPH66zZ8/qiy++0J133qlu3brpX//6l7KysvTiiy9qwYIFla4ZERGhDz/8UIsWLdL8+fOVm5srSfroo48kSdnZ2SoqKrKfS9LRo0f1xhtv6O2339bbb7+tvLw8ZWZm/uD8y8rKVFJS4vYAAACBifjzgMTERM2ePVs333yzZs2apfr166thw4YaP368br75Zs2ZM0f//e9/tX//fi1dulSxsbH64x//qLZt2+qee+7RvHnztHjxYlVUVNjX7NSpk9LT03XzzTdrxIgRSk5O1pYtWyRJjRo1kiRdd911iomJsZ9LUkVFhXJyctSxY0f95Cc/0fDhw+3XXUlGRoaio6P
tR2xsrBe+SwAAwB8Qfx7QqVMn++t69erphhtu0C233GIfa9KkiSTp5MmTOnTokHr06CGHw2Gf79Wrl86cOaN///vfVV5Tkpo2baqTJ09edS5xcXGKioqq0etmzZql4uJi+3HixImrvg8AAKibgn09gUAQEhLi9tzhcLgduxR6FRUVsizLLfwkybIst3FXuub37wzWZC5Xe53T6ZTT6bzqtQEAQN3Hnb9a1r59e+3YscMOPknasWOHoqKi1KxZs2pfJyQkROXl5d6YIgAACGDEXy373e9+pxMnTmjSpEk6fPiw3nzzTaWnp2vatGkKCqr+dsTFxWnLli366quv9M0333hxxgAAIJAQf7WsWbNm2rBhg3bt2qXExERNmDBBY8eO1ezZs2t0ncWLFys3N1exsbFKSkry0mwBAECgcVjf//kjIKmkpOS7T/2mvaIgZ7ivpwMAQMA4njnQa9e+9Od3cXGxXC7XFcdx5w8AAMAgxB8AAIBBiD8AAACDEH8AAAAGIf4AAAAMQvwBAAAYhPgDAAAwCPEHAABgEOIPAADAIMQfAACAQYg/AAAAgxB/AAAABiH+AAAADEL8AQAAGIT4AwAAMAjxBwAAYJBgX08A/uvjeSlyuVy+ngYAAPAg7vwBAAAYhPgDAAAwCPEHAABgEOIPAADAIMQfAACAQYg/AAAAgxB/AAAABiH+AAAADEL8AQAAGIT4AwAAMAjxBwAAYBDiDwAAwCDEHwAAgEGIPwAAAIMQfwAAAAYh/gAAAAxC/AEAABiE+AMAADAI8QcAAGAQ4g8AAMAgxB8AAIBBiD8AAACDEH8AAAAGIf4AAAAMQvwBAAAYhPgDAAAwCPEHAABgEOIPAADAIMQfAACAQYg/AAAAgxB/AAAABiH+AAAADEL8AQAAGIT4AwAAMAjxBwAAYBDiDwAAwCDEHwAAgEGIPwAAAIMQfwAAAAYh/gAAAAxC/AEAABiE+AMAADAI8QcAAGAQ4g8AAMAgxB8AAIBBgn09AfivjumbFOQM9/U0ANRBxzMH+noKAK6AO38AAAAGIf4AAAAMQvwBAAAYhPgDAAAwCPEHAABgEOIPAADAIMQfAACAQYg/AAAAgxB/AAAABiH+AAAADEL8AQAAGIT4AwAAMAjxBwAAYBDiDwAAwCDEHwAAgEGIPwAAAIMQfwAAAAYh/qpp7ty5cjgccjgcevrpp+3jDodDb7zxhs/m9UP69u1rz3nfvn2+ng4AAPADxF8NdOjQQUVFRfrNb37j1fc5ePCgBg0apLi4uEqxeUlGRoa6deumqKgoNW7cWPfcc48++eQTtzGvv/66du3a5dW5AgCAuoX4q4Hg4GDFxMQoPDzcq+9z9uxZxcfHKzMzUzExMVWOycvL08SJE7Vz507l5ubq4sWLGjBggEpLS+0xDRo0UKNGjbw6VwAAULcYFX8rV67UDTfcoLKyMrfjgwYN0ogRIzzyHr///e/Vpk0bhYeHKz4+Xo8++qguXLjgNmbBggVq3LixoqKiNG7cOM2cOVOdO3e2z3fr1k1/+MMfNGTIEDmdzirfZ+PGjRo1apQ6dOigxMREZWdnq7CwUHv27PHIOgAAQGAyKv4GDx6s8vJyrV+/3j526tQpvf322xo9erRH3iMqKko5OTkqKCjQM888o+XLl2vJkiX2+VWrVunxxx/XwoULtWfPHrVo0UJZWVk/+n2Li4slfXe3r6bKyspUUlLi9gAAAIHJqPirX7++hg4dquzsbPvYqlWr1Lx5c/Xt29cj7zF79mz17NlTcXFxuuuuu/TQQw/plVdesc8/99xzGjt2rEaPHq02bdpozpw5uuWWW37Ue1qWpWnTpql3797q2LFjjV+fkZGh6Oho+xEbG/uj5gMAAPyXUfEnSePHj9fmzZv1xRdfSJKys7M1atQoORwOj1z/tddeU+/evRUTE6PIyEg9+uijKiwstM9/8skn6t69u9trLn9eUw8++KD279+vNWvWXNPrZ82apeLiYvt
x4sSJHzUfAADgv4yLv6SkJCUmJmrlypXau3evDhw4oFGjRnnk2jt37tSQIUN0xx136O2331Z+fr4eeeQRnT9/3m3c5aFpWdY1v+ekSZO0fv16vf/++2revPk1XcPpdMrlcrk9AABAYAr29QR8Ydy4cVqyZIm++OIL9evXz2M/5ty+fbtatmypRx55xD72+eefu41JSEjQrl27NHz4cPvY7t27a/xelmVp0qRJWrdunbZu3apWrVpd+8QBAIAxjIy/1NRUTZ8+XcuXL9fKlSs9dt3WrVursLBQL730krp166Z33nlH69atcxszadIkjR8/XsnJyerZs6defvll7d+/X/Hx8faY8+fPq6CgwP76iy++0L59+xQZGanWrVtLkiZOnKjVq1frzTffVFRUlL766itJUnR0tOrXr++xNQEAgMBi3I99JcnlcmnQoEGKjIzUPffc47Hr3n333Zo6daoefPBBde7cWTt27NCjjz7qNiY1NVWzZs3S9OnT1aVLFx07dkyjRo1SWFiYPebLL79UUlKSkpKSVFRUpCeffFJJSUkaN26cPSYrK0vFxcXq27evmjZtaj9efvllj60HAAAEHiPv/ElSUVGRUlNTr/j36FXX5b+vt2jRIi1atMjtWFpamtvzRx991C0K+/fvb9/Rk6S4uLir/h7gj/k9QQAAYC7j7vx9/fXXeumll/Tee+9p4sSJNXrtgQMHFBkZqaVLl17z+589e1ZPPfWUDh48qMOHDys9PV3vvvuuRo4cec3XvJI77rhDHTp08Ph1AQBA3WXcnb8uXbrom2++0cKFC5WQkGAf79ChQ6UPZ1yybNkyTZ48WcOGDZOkH/VPpjkcDm3YsEELFixQWVmZEhIStHbtWvXr1++ar3klf/7zn/W///1PktSiRQuPXx8AANQ9xsXf8ePHqzy+YcOGSv8M2yVNmjRRVFTUNf3rGZerX7++3n333R99nepo1qxZrbwPAACoO4yLvytp2bKlr6cAAADgdcb9zh8AAIDJiD8AAACDEH8AAAAGIf4AAAAMQvwBAAAYhPgDAAAwCPEHAABgEOIPAADAIMQfAACAQYg/AAAAgxB/AAAABiH+AAAADBLs6wnAf308L0Uul8vX0wAAAB7EnT8AAACDEH8AAAAGIf4AAAAMQvwBAAAYhPgDAAAwCPEHAABgEOIPAADAIMQfAACAQYg/AAAAgxB/AAAABiH+AAAADEL8AQAAGIT4AwAAMAjxBwAAYBDiDwAAwCDEHwAAgEGIPwAAAIMQfwAAAAYh/gAAAAxC/AEAABiE+AMAADAI8QcAAGAQ4g8AAMAgxB8AAIBBiD8AAACDEH8AAAAGIf4AAAAMQvwBAAAYhPgDAAAwCPEHAABgEOIPAADAIMQfAACAQYg/AAAAgxB/AAAABiH+AAAADEL8AQAAGIT4AwAAMAjxBwAAYBDiDwAAwCDEHwAAgEGIPwAAAIMQfwAAAAYh/gAAAAxC/AEAABiE+AMAADAI8QcAAGCQYF9PAP6rY/omBTnDfT0NAN9zPHOgr6cAoI7jzh8AAIBBiD8AAACDEH8AAAAGIf4AAAAMQvwBAAAYhPgDAAAwCPEHAABgEOIPAADAIMQfAACAQYg/AAAAgxB/AAAABiH+AAAADEL8AQAAGIT4AwAAMAjxBwAAYBDiDwAAwCDEHwAAgEE8Gn9bt26Vw+HQt99+68nL+kygrQcAAIA7fx6Sk5Oj6667ztfTcOOPcwIAAL5V5+Pv/Pnzvp6CR1mWpYsXL/p6GgAAIEDVOP7Kyso0efJkNW7cWGFhYerdu7c++ugjtzHbt29XYmKiwsLCdOutt+rAgQP2uc8//1x33XWXrr/+ekVERKhDhw7asGGDfb6goEB33nmnIiMj1aRJEw0fPlynTp2yz/ft21cPPvigpk2bpoYNG6p///66//77NWTIELc5XLhwQQ0bNlR2drak76Jq0aJFio+PV/369ZWYmKjXXnvN7TUbNmxQmzZtVL9
+ff30pz/V8ePHq/U92bp1q0aPHq3i4mI5HA45HA7NnTtXkvS3v/1NycnJioqKUkxMjIYOHaqTJ0+6vdbhcGjTpk1KTk6W0+nUtm3bdPr0aaWmpioiIkJNmzbVkiVL1LdvX6WlpdmvPX/+vGbMmKFmzZopIiJCt956q7Zu3XrVOQEAAHPVOP5mzJihtWvXasWKFdq7d69at26tlJQUff311/aYhx9+WE8++aQ++ugjNW7cWL/85S914cIFSdLEiRNVVlamf/zjHzpw4IAWLlyoyMhISVJRUZH69Omjzp07a/fu3dq4caP+85//6N5773Wbw4oVKxQcHKzt27dr2bJlSk1N1fr163XmzBl7zKZNm1RaWqpBgwZJkmbPnq3s7GxlZWXp4MGDmjp1qoYNG6a8vDxJ0okTJ/TrX/9ad955p/bt26dx48Zp5syZ1fqe9OzZU08//bRcLpeKiopUVFSk6dOnS/ou0B577DH961//0htvvKFjx45p1KhRVX5fMzIydOjQIXXq1EnTpk3T9u3btX79euXm5mrbtm3au3ev22tGjx6t7du366WXXtL+/fs1ePBg3X777fr0009/cE6XKysrU0lJidsDAAAEpuCaDC4tLVVWVpZycnJ0xx13SJKWL1+u3Nxcvfjii+rWrZskKT09Xf3795f0Xag1b95c69at07333qvCwkINGjRIt9xyiyQpPj7evn5WVpa6dOmiJ554wj72l7/8RbGxsTpy5IjatGkjSWrdurUWLVpkj7npppsUERGhdevWafjw4ZKk1atX66677pLL5VJpaameeuopvffee+rRo4f9vh988IGWLVumPn36KCsrS/Hx8VqyZIkcDocSEhLsOL2a0NBQRUdHy+FwKCYmxu3cmDFj7K/j4+P17LPPqnv37jpz5owdvZI0f/58+3t2+vRprVixQqtXr9bPf/5zSVJ2drZuvPFGe/zRo0e1Zs0a/fvf/7aPT58+XRs3blR2draeeOKJK87pchkZGZo3b95V1wkAAOq+Gt35O3r0qC5cuKBevXrZx0JCQtS9e3cdOnTIPnYpsCSpQYMGSkhIsM9PnjxZCxYsUK9evZSenq79+/fbY/fs2aP3339fkZGR9qNt27b2e1+SnJzsNq+QkBANHjxYq1atkvRdpL755ptKTU2V9N2Pks+dO6f+/fu7XXvlypX2dQ8dOqTbbrtNDoejynVcq/z8fN19991q2bKloqKi1LdvX0lSYWGh27jvr+mzzz7ThQsX1L17d/tYdHS0EhIS7Od79+6VZVlq06aN25ry8vLcvlfVMWvWLBUXF9uPEydOXMNKAQBAXVCjO3+WZUmSWyBdOn75sctdOj9u3DilpKTonXfe0ebNm5WRkaHFixdr0qRJqqio0F133VXl3bamTZvaX0dERFQ6n5qaqj59+ujkyZPKzc1VWFiYfXeyoqJCkvTOO++oWbNmbq9zOp1ua/Ok0tJSDRgwQAMGDNDf/vY3NWrUSIWFhUpJSan0QZXvr+mHvs+XVFRUqF69etqzZ4/q1avnNu77dxSrw+l02t8HAAAQ2Gp0569169YKDQ3VBx98YB+7cOGCdu/erXbt2tnHdu7caX/9zTff6MiRI/YdPEmKjY3VhAkT9Prrr+uhhx7S8uXLJUldunTRwYMHFRcXp9atW7s9qgq+7+vZs6diY2P18ssva9WqVRo8eLBCQ0MlSe3bt5fT6VRhYWGl68bGxtpjvj/vy9dxNaGhoSovL3c7dvjwYZ06dUqZmZn6yU9+orZt27p92ONKbrrpJoWEhGjXrl32sZKSEn366af286SkJJWXl+vkyZOV1nTpx7xVzQkAAJitRvEXERGhBx54QA8//LA2btyogoICjR8/XmfPntXYsWPtcfPnz9eWLVv08ccfa9SoUWrYsKHuueceSVJaWpo2bdqkY8eOae/evXrvvffscJw4caK+/vpr3X///dq1a5c+++wzbd68WWPGjLlqxDgcDg0dOlQ
vvPCCcnNzNWzYMPtcVFSUpk+frqlTp2rFihU6evSo8vPz9fzzz2vFihWSpAkTJujo0aOaNm2aPvnkE61evVo5OTnV/t7ExcXpzJkz2rJli06dOqWzZ8+qRYsWCg0N1XPPPafPPvtM69ev12OPPXbVa0VFRWnkyJF6+OGH9f777+vgwYMaM2aMgoKC7LuBbdq0UWpqqkaMGKHXX39dx44d00cffaSFCxfan56uak4AAMBsNf60b2ZmpgYNGqThw4erS5cu+r//+z9t2rRJ119/vduYKVOmqGvXrioqKtL69evtu3Dl5eWaOHGi2rVrp9tvv10JCQlaunSpJOnGG2/U9u3bVV5erpSUFHXs2FFTpkxRdHS0goKuPtXU1FQVFBSoWbNmbr+XKEmPPfaY5syZo4yMDLVr104pKSl666231KpVK0lSixYttHbtWr311ltKTEzUCy+84PbBk6vp2bOnJkyYoPvuu0+NGjXSokWL1KhRI+Xk5OjVV19V+/btlZmZqSeffLJa13vqqafUo0cP/eIXv1C/fv3Uq1cvtWvXTmFhYfaY7OxsjRgxQg899JASEhL0y1/+Uh9++KF9N7OqOQEAALM5LG/8shs8rrS0VM2aNdPixYvd7rJ6Q0lJiaKjoxWb9oqCnOFefS8ANXM8c6CvpwDAT13687u4uFgul+uK42r0gQ/Unvz8fB0+fFjdu3dXcXGx5s+fL0m6++67fTwzAABQl9X5f96tttxxxx1uf6XK9x81+fFwTTz55JNKTExUv379VFpaqm3btqlhw4ZeeS8AAGAG7vxV05///Gf973//q/JcgwYNPP5+SUlJ2rNnj8evCwAAzEb8VdPlfz8gAABAXcSPfQEAAAxC/AEAABiE+AMAADAI8QcAAGAQ4g8AAMAgxB8AAIBBiD8AAACDEH8AAAAGIf4AAAAMQvwBAAAYhPgDAAAwCPEHAABgkGBfTwD+6+N5KXK5XL6eBgAA8CDu/AEAABiE+AMAADAI8QcAAGAQ4g8AAMAgxB8AAIBBiD8AAACDEH8AAAAGIf4AAAAMQvwBAAAYhPgDAAAwCPEHAABgEOIPAADAIMQfAACAQYg/AAAAgxB/AAAABiH+AAAADEL8AQAAGIT4AwAAMAjxBwAAYBDiDwAAwCDEHwAAgEGIPwAAAIMQfwAAAAYh/gAAAAxC/AEAABiE+AMAADAI8QcAAGAQ4g8AAMAgxB8AAIBBiD8AAACDEH8AAAAGIf4AAAAMQvwBAAAYhPgDAAAwCPEHAABgEOIPAADAIMQfAACAQYg/AAAAgxB/AAAABiH+AAAADEL8AQAAGIT4AwAAMAjxBwAAYBDiDwAAwCDEHwAAgEGCfT0B+K+O6ZsU5Az39TQAv3E8c6CvpwAAPxp3/gAAAAxC/AEAABiE+AMAADAI8QcAAGAQ4g8AAMAgxB8AAIBBiD8AAACDEH8AAAAGIf4AAAAMQvwBAAAYhPgDAAAwCPEHAABgEOIPAADAIMQfAACAQYg/AAAAgxB/AAAABiH+AAAADEL8Bbi5c+eqc+fOvp4GAADwE8Sfl/Xt21dpaWm+ngYAAIAk4s/nLMvSxYsXfT0NAABgCOLPi0aNGqW8vDw988wzcjgccjgcysnJkcPh0KZNm5ScnCyn06lt27bJsiwtWrRI8fHxql+/vhITE/Xaa6/Z19q6dascDoe2bNmi5ORkhYeHq2fPnvrkk0/c3jMzM1NNmjRRVFSUxo4dq3PnztX2sgEAgB8j/rzomWeeUY8ePTR+/HgVFRWpqKhIsbGxkqQZM2YoIyNDhw4dUqdOnTR79mxlZ2crKytLBw8e1NSpUzVs2DDl5eW5XfORRx7R4sWLtXv3bgUHB2vMmDH2uVdeeUXp6el6/PHHtXv3bjVt2lRLly696jzLyspUUlLi9gAAAIEp2NcTCGTR0dEKDQ1VeHi4YmJiJEmHDx+WJM2fP1/9+/eXJJWWluqpp57Se++9px49ekiS4uPj9cEHH2j
ZsmXq06ePfc3HH3/cfj5z5kwNHDhQ586dU1hYmJ5++mmNGTNG48aNkyQtWLBA77777lXv/mVkZGjevHmeXTwAAPBL3PnzkeTkZPvrgoICnTt3Tv3791dkZKT9WLlypY4ePer2uk6dOtlfN23aVJJ08uRJSdKhQ4fseLzk8udVmTVrloqLi+3HiRMnrnldAADAv3Hnz0ciIiLsrysqKiRJ77zzjpo1a+Y2zul0uj0PCQmxv3Y4HG6vv1ZOp7PS+wAAgMBE/HlZaGioysvLf3BM+/bt5XQ6VVhY6PYj3ppq166ddu7cqREjRtjHdu7cec3XAwAAgYf487K4uDh9+OGHOn78uCIjI6u8SxcVFaXp06dr6tSpqqioUO/evVVSUqIdO3YoMjJSI0eOrNZ7TZkyRSNHjlRycrJ69+6tVatW6eDBg4qPj/f0sgAAQB3F7/x52fTp01WvXj21b99ejRo1UmFhYZXjHnvsMc2ZM0cZGRlq166dUlJS9NZbb6lVq1bVfq/77rtPc+bM0e9//3t17dpVn3/+uR544AFPLQUAAAQAh2VZlq8nAf9SUlKi6Ohoxaa9oiBnuK+nA/iN45kDfT0FALiiS39+FxcXy+VyXXEcd/4AAAAMQvwBAAAYhPgDAAAwCPEHAABgEOIPAADAIMQfAACAQYg/AAAAgxB/AAAABiH+AAAADEL8AQAAGIT4AwAAMAjxBwAAYBDiDwAAwCDEHwAAgEGIPwAAAIMQfwAAAAYJ9vUE4L8+npcil8vl62kAAAAP4s4fAACAQYg/AAAAgxB/AAAABiH+AAAADEL8AQAAGIT4AwAAMAjxBwAAYBDiDwAAwCDEHwAAgEGIPwAAAIMQfwAAAAYh/gAAAAxC/AEAABiE+AMAADAI8QcAAGAQ4g8AAMAgxB8AAIBBiD8AAACDEH8AAAAGIf4AAAAMQvwBAAAYJNjXE4D/sSxLklRSUuLjmQAAgOq69Of2pT/Hr4T4QyX//e9/JUmxsbE+ngkAAKip06dPKzo6+orniT9U0qBBA0lSYWHhD/6Pp64rKSlRbGysTpw4IZfL5evpeBVrDUymrNWUdUqsNVDV1loty9Lp06d14403/uA44g+VBAV996ug0dHRAf9/SElyuVxGrFNirYHKlLWask6JtQaq2lhrdW7a8IEPAAAAgxB/AAAABiH+UInT6VR6erqcTqevp+JVpqxTYq2BypS1mrJOibUGKn9bq8O62ueBAQAAEDC48wcAAGAQ4g8AAMAgxB8AAIBBiD8AAACDEH8GWLp0qVq1aqWwsDB17dpV27Zt+8HxeXl56tq1q8LCwhQfH68XXnih0pi1a9eqffv2cjqdat++vdatW+et6deIp9eak5Mjh8NR6XHu3DlvLqNaarLWoqIiDR06VAkJCQoKClJaWlqV4/xxXz29zkDZ09dff139+/dXo0aN5HK51KNHD23atKnSOH/cU8nzaw2Uff3ggw/Uq1cv3XDDDapfv77atm2rJUuWVBrnj/vq6XUGyp5+3/bt2xUcHKzOnTtXOlere2ohoL300ktWSEiItXz5cqugoMCaMmWKFRERYX3++edVjv/ss8+s8PBwa8qUKVZBQYG1fPlyKyQkxHrttdfsMTt27LDq1atnPfHEE9ahQ4esJ554wgoODrZ27txZW8uqkjfWmp2dbblcLquoqMjt4Ws1XeuxY8esyZMnWytWrLA6d+5sTZkypdIYf9xXb6wzUPZ0ypQp1sKFC61du3ZZR44csWbNmmWFhIRYe/futcf4455alnfWGij7unfvXmv16tXWxx9/bB07dsz661//aoWHh1vLli2zx/jjvnpjnYGyp5d8++23Vnx8vDVgwAArMTHR7Vxt7ynxF+C6d+9uTZgwwe1Y27ZtrZkzZ1Y5fsaMGVbbtm3djv32t7+1brvtNvv5vffea91+++1uY1JSUqwhQ4Z4aNbXxhtrzc7OtqKjoz0+1x+rpmv9vj59+lQZRf64r95YZyD
u6SXt27e35s2bZz/3xz21LO+sNZD39Ve/+pU1bNgw+7k/7qs31hloe3rfffdZs2fPttLT0yvFX23vKT/2DWDnz5/Xnj17NGDAALfjAwYM0I4dO6p8zT//+c9K41NSUrR7925duHDhB8dc6Zq1wVtrlaQzZ86oZcuWat68uX7xi18oPz/f8wuogWtZa3X42756a51SYO5pRUWFTp8+rQYNGtjH/G1PJe+tVQrMfc3Pz9eOHTvUp08f+5i/7au31ikFzp5mZ2fr6NGjSk9Pr/J8be8p8RfATp06pfLycjVp0sTteJMmTfTVV19V+ZqvvvqqyvEXL17UqVOnfnDMla5ZG7y11rZt2yonJ0fr16/XmjVrFBYWpl69eunTTz/1zkKq4VrWWh3+tq/eWmeg7unixYtVWlqqe++91z7mb3sqeW+tgbavzZs3l9PpVHJysiZOnKhx48bZ5/xtX721zkDZ008//VQzZ87UqlWrFBwcXOWY2t7TqmeBgOJwONyeW5ZV6djVxl9+vKbXrC2eXuttt92m2267zT7fq1cvdenSRc8995yeffZZT037mnhjD/xxXz09p0Dc0zVr1mju3Ll688031bhxY49c09s8vdZA29dt27bpzJkz2rlzp2bOnKnWrVvr/vvv/1HX9DZPrzMQ9rS8vFxDhw7VvHnz1KZNG49c0xOIvwDWsGFD1atXr9J/OZw8ebLSf2FcEhMTU+X44OBg3XDDDT845krXrA3eWuvlgoKC1K1bN5/+l+e1rLU6/G1fvbXOy9X1PX355Zc1duxYvfrqq+rXr5/bOX/bU8l7a71cXd/XVq1aSZJuueUW/ec//9HcuXPtKPK3ffXWOi9XF/f09OnT2r17t/Lz8/Xggw9K+u7XFizLUnBwsDZv3qyf/exntb6n/Ng3gIWGhqpr167Kzc11O56bm6uePXtW+ZoePXpUGr9582YlJycrJCTkB8dc6Zq1wVtrvZxlWdq3b5+aNm3qmYlfg2tZa3X42756a52Xq8t7umbNGo0aNUqrV6/WwIEDK533tz2VvLfWy9Xlfb2cZVkqKyuzn/vbvnprnVWdr2t76nK5dODAAe3bt89+TJgwQQkJCdq3b59uvfVWST7YU698jAR+49JH0l988UWroKDASktLsyIiIqzjx49blmVZM2fOtIYPH26Pv/TXn0ydOtUqKCiwXnzxxUp//cn27dutevXqWZmZmdahQ4eszMxMn/81A5blnbXOnTvX2rhxo3X06FErPz/fGj16tBUcHGx9+OGHtb6+76vpWi3LsvLz8638/Hyra9eu1tChQ638/Hzr4MGD9nl/3FdvrDNQ9nT16tVWcHCw9fzzz7v9NRjffvutPcYf99SyvLPWQNnXP/7xj9b69eutI0eOWEeOHLH+8pe/WC6Xy3rkkUfsMf64r95YZ6Ds6eWq+rRvbe8p8WeA559/3mrZsqUVGhpqdenSxcrLy7PPjRw50urTp4/b+K1bt1pJSUlWaGioFRcXZ2VlZVW65quvvmolJCRYISEhVtu2ba21a9d6exnV4um1pqWlWS1atLBCQ0OtRo0aWQMGDLB27NhRG0u5qpquVVKlR8uWLd3G+OO+enqdgbKnffr0qXKtI0eOdLumP+6pZXl+rYGyr88++6zVoUMHKzw83HK5XFZSUpK1dOlSq7y83O2a/rivnl5noOzp5aqKP8uq3T11WNb//w13AAAABDx+5w8AAMAgxB8AAIBBiD8AAACDEH8AAAAGIf4AAAAMQvwBAAAYhPgDAAAwCPEHAABgEOIPAADAIMQfAACAQYg/AAAAgxB/AAAABvl/rLUmxYXrZtsAAAAASUVORK5CYII=", - "text/plain": [ - "
" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], + "outputs": [], "source": [ "#| eval: false\n", "feature_importances['Past variable importance over time'].mean().sort_values().plot(kind='barh')" @@ -2047,28 +1356,7 @@ "cell_type": "code", "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": null, - "metadata": {}, - "output_type": "execute_result" - }, - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAk8AAAGdCAYAAAAL2ZfXAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8fJSN1AAAACXBIWXMAAA9hAAAPYQGoP6dpAAAaGUlEQVR4nO3de5Ddg/n48WdjY5Owu+5JJCsrRFSQRNzpl6lg1LRl0GoIMYlWEUKNJhMkOjokdekFKSlxqaBFMC6tUMkgJUXUJUjrGpJUg1xcJiT5/P7w2x1bKXk25+wm6/WaOTPZz37OOc8+c7L7nrNndyuKoigCAIDV0q61BwAAWJeIJwCABPEEAJAgngAAEsQTAECCeAIASBBPAAAJ4gkAIKGytQdoa1auXBnz5s2L6urqqKioaO1xAIDVUBRFLF26NLbccsto1+7Ln1sSTyU2b968qKura+0xAIBmmDt3bnTv3v1LzxFPJVZdXR0Rny2/pqamlacBAFbHkiVLoq6urvHr+JcRTyXW8K26mpoa8QQA65jVecmNF4wDACSIJwCABPEEAJAgngAAEsQTAECCeAIASBBPAAAJ4gkAIEE8AQAkiCcAgATxBACQIJ4AABLEEwBAgngCAEgQTwAACeIJACBBPAEAJIgnAIAE8QQAkCCeAAASxBMAQIJ4AgBIEE8AAAniCQAgQTwBACSIJwCABPEEAJAgngAAEsQTAECCeAIASBBPAAAJ4gkAIEE8AQAkiCcAgATxBACQIJ4AABLEEwBAgngCAEgQTwAACeIJACBBPAEAJIgnAIAE8QQAkFDZ2gO0VTuO+Uu0q+rU2mMAQJvx+kWHtvYIEeGZJwCAFPEEAJAgngAAEsQTAECCeAIASBBPAAAJ4gkAIEE8AQAkiCcAgATxBACQIJ4AABLEEwBAgngCAEgQTwAACeIJACBBPAEAJIgnAIAE8QQAkCCeAAASxBMAQIJ4AgBIEE8AAAniCQAgQTwBACSIJwCABPEEAJAgngAAEsQTAECCeAIASBBPAAAJ4gkAIEE8AQAkiCcAgATxBACQIJ4AABLEEwBAgngCAEgQTwAACeIJACBBPAEAJLRIPI0dOzYqKiqioqIifvWrXzUer6ioiDvvvLMlRkjbf//9G2d+5plnWnscAGAt0WLPPPXp0yfmz58fP/rRj8p6Py+88EIcccQRUV9f/4VYa3DhhRfGbrvtFtXV1bHFFlvEYYcdFi+//HKTc+64446YOXNmWWcFANY9LRZPlZWV0aVLl+jUqVNZ7+ejjz6Knj17xkUXXRRdunRZ5TnTp0+PU045JR5//PGYOnVqLF++PA466KD48MMPG8/ZZJNNYvPNNy/rrADAume14+mGG26ITTfdNJYtW9bk+BFHHBHHHXdcSYb52c9+Ftttt1106tQpevbsGeeee258+umnTc654IILYosttojq6uoYNmxYjBw5Mvr169f4/t122y1++ctfxtFHHx1VVVWrvJ8///nPMWTIkOjTp0/07ds3Jk
2aFG+++WY89dRTJfk4AIC2a7Xj6aijjooVK1bE3Xff3Xhs4cKFcc8998QJJ5xQkmGqq6vjuuuui9mzZ8evf/3rmDhxYlx22WWN77/pppviF7/4RYwbNy6eeuqp2GqrrWLChAlrfL+LFy+OiM+ebcpatmxZLFmypMkFAGi7VjueOnbsGIMGDYpJkyY1Hrvpppuie/fusf/++5dkmHPOOSf23nvvqK+vj+985zvx05/+NP74xz82vv+3v/1tDB06NE444YTYbrvt4rzzzouddtppje6zKIo488wzY999940dd9wxff0LL7wwamtrGy91dXVrNA8AsHZLvebpxBNPjAceeCDefvvtiIiYNGlSDBkyJCoqKkoyzG233Rb77rtvdOnSJTbccMM499xz480332x8/8svvxy77757k+v899tZp556ajz77LNx8803N+v6o0aNisWLFzde5s6du0bzAABrt1Q89e/fP/r27Rs33HBDPP300/Hcc8/FkCFDSjLI448/HkcffXQccsghcc8998SsWbNi9OjR8cknnzQ5779DrSiKZt/n8OHD4+67746HH344unfv3qzbqKqqipqamiYXAKDtqsxeYdiwYXHZZZfF22+/HQMHDizZt6kee+yx6NGjR4wePbrx2BtvvNHknN69e8fMmTNj8ODBjceefPLJ9H0VRRHDhw+PKVOmxLRp02Lrrbdu/uAAwNdKOp6OOeaYOOuss2LixIlxww03lGyQbbfdNt5888245ZZbYrfddot77703pkyZ0uSc4cOHx4knnhi77rpr7L333nHrrbfGs88+Gz179mw855NPPonZs2c3/vvtt9+OZ555JjbccMPYdtttIyLilFNOicmTJ8ddd90V1dXVsWDBgoiIqK2tjY4dO5bsYwIA2p7073mqqamJI444IjbccMM47LDDSjbI9773vTjjjDPi1FNPjX79+sWMGTPi3HPPbXLOMcccE6NGjYqzzjordtlll3jttddiyJAh0aFDh8Zz5s2bF/3794/+/fvH/Pnz4+KLL47+/fvHsGHDGs+ZMGFCLF68OPbff//o2rVr4+XWW28t2ccDALRN6WeeIiLmz58fxxxzzP/8PUqr679frzR+/PgYP358k2MjRoxo8va5557bJKoOPPDAxmeUIiLq6+u/8nVQa/I6KQDg6y31zNN7770Xt9xyS/z1r3+NU045JXVHzz33XGy44YZx5ZVXpq73eR999FFceuml8cILL8RLL70UY8aMiQcffDCOP/74Zt/m/3LIIYdEnz59Sn67AMC6LfXM0y677BLvv/9+jBs3Lnr37t14vE+fPl94cXeDq666Kk477bQ49thjIyLW6E+eVFRUxH333RcXXHBBLFu2LHr37h233357DBw4sNm3+b/8/ve/j48//jgiIrbaaquS3z4AsG5KxdPrr7++yuP33XffF/6MSoPOnTtHdXV1s35793/r2LFjPPjgg2t8O6ujW7duLXI/AMC6pVmvefpvPXr0KMXNAACs9dI/bQcA8HUmngAAEsQTAECCeAIASBBPAAAJ4gkAIEE8AQAkiCcAgATxBACQIJ4AABLEEwBAgngCAEgQTwAACeIJACBBPAEAJIgnAIAE8QQAkCCeAAASxBMAQIJ4AgBIEE8AAAniCQAgQTwBACSIJwCABPEEAJAgngAAEsQTAECCeAIASBBPAAAJ4gkAIEE8AQAkiCcAgATxBACQIJ4AABLEEwBAQmVrD9BWPX/+wVFTU9PaYwAAJeaZJwCABPEEAJAgngAAEsQTAECCeAIASBBPAAAJ4gkAIEE8AQAkiCcAgATxBACQIJ4AABLEEwBAgngCAEgQTwAACeIJACBBPAEAJIgnAIAE8QQAkCCeAAASxBMAQIJ4AgBIEE8AAAniCQAgQTwBACSIJwCABPEEAJAgngAAEsQTAECCeAIASBBPAAAJ4gkAIEE8AQAkiCcAgATxBACQIJ4AABLEEwBAgngCAEgQTwAACe
IJACBBPAEAJIgnAIAE8QQAkCCeAAASxBMAQIJ4AgBIEE8AAAniCQAgQTwBACSIJwCABPEEAJAgngAAEsQTAECCeAIASBBPAAAJ4gkAIEE8AQAkiCcAgATxBACQIJ4AABLEEwBAgngCAEgQTwAACeIJACBBPAEAJIgnAIAE8QQAkCCeAAASxBMAQIJ4AgBIEE8AAAniCQAgQTwBACSIJwCABPEEAJAgngAAEsQTAECCeAIASBBPAAAJ4gkAIEE8AQAkiCcAgATxBACQIJ4AABLEEwBAgngCAEgQTwAACeIJACBBPAEAJIgnAIAE8QQAkCCeAAASxBMAQEJlaw/QVu045i/RrqpTa48BJfH6RYe29ggAaw3PPAEAJIgnAIAE8QQAkCCeAAASxBMAQIJ4AgBIEE8AAAniCQAgQTwBACSIJwCABPEEAJAgngAAEsQTAECCeAIASBBPAAAJ4gkAIEE8AQAkiCcAgATxBACQIJ4AABLEEwBAgngCAEgQTwAACeIJACBBPAEAJIgnAIAE8QQAkCCeAAASxBMAQIJ4AgBIEE8AAAniCQAgQTwBACSIJwCABPEEAJAgngAAEsQTAECCeAIASBBPAAAJ4gkAIEE8fc7YsWOjX79+rT0GALAW+9rGU0VFRdx5552tPQYAsI752sYTAEBztHo87b///jF8+PAYMWJEbLzxxtG5c+e4+uqr48MPP4wTTjghqqurY5ttton777+/8TrTp0+P3XffPaqqqqJr164xcuTIWL58eZPbPO200+Lss8+OTTbZJLp06RJjx45tfH99fX1ERBx++OFRUVHR+HaDG2+8Merr66O2tjaOPvroWLp0aTlXAACsQ1o9niIirr/++thss81i5syZMXz48PjJT34SRx11VOy9997x9NNPx8EHHxyDBw+Ojz76KN5+++349re/Hbvttlv84x//iAkTJsQ111wTF1xwwRduc4MNNognnngixo8fHz//+c9j6tSpERHx97//PSIiJk2aFPPnz298OyLilVdeiTvvvDPuueeeuOeee2L69Olx0UUX/c/Zly1bFkuWLGlyAQDarrUinvr27RvnnHNO9OrVK0aNGhUdO3aMzTbbLE488cTo1atXnHfeefHuu+/Gs88+G1deeWXU1dXF5ZdfHttvv30cdthhcf7558cll1wSK1eubLzNnXfeOcaMGRO9evWK4447Lnbdddd46KGHIiJi8803j4iIjTbaKLp06dL4dkTEypUr47rrrosdd9wxvvnNb8bgwYMbr7cqF154YdTW1jZe6urqyrQlAGBtsFbE084779z47/XWWy823XTT2GmnnRqPde7cOSIi3nnnnXjxxRdjr732ioqKisb377PPPvHBBx/EW2+9tcrbjIjo2rVrvPPOO185S319fVRXV6/29UaNGhWLFy9uvMydO/cr7wMAWHdVtvYAERHt27dv8nZFRUWTYw2htHLlyiiKokk4RUQURdHkvP91m59/Ziozy5ddr6qqKqqqqr7ydgGAtmGteOYpY4cddogZM2Y0BlNExIwZM6K6ujq6deu22rfTvn37WLFiRTlGBADasHUunk4++eSYO3duDB8+PF566aW46667YsyYMXHmmWdGu3ar/+HU19fHQw89FAsWLIj333+/jBMDAG3JOhdP3bp1i/vuuy9mzpwZffv2jZNOOimGDh0a55xzTup2Lrnkkpg6dWrU1dVF//79yzQtANDWVBSf//4Xa2zJkiWf/dTdiD9Gu6pOrT0OlMTrFx3a2iMAlFXD1+/FixdHTU3Nl567zj3zBADQmsQTAECCeAIASBBPAAAJ4gkAIEE8AQAkiCcAgATxBACQIJ4AABLEEwBAgngCAEgQTwAACeIJACBBPAEAJIgnAIAE8QQAkCCeAAASxBMAQIJ4AgBIEE8AAAniCQAgQTwBACSIJwCABPEEAJAgngAAEsQTAECCeAIASBBPAAAJ4gkAIEE8AQAkiCcAgATxBACQIJ4AAB
LEEwBAgngCAEgQTwAACeIJACBBPAEAJIgnAICEytYeoK16/vyDo6amprXHAABKzDNPAAAJ4gkAIEE8AQAkiCcAgATxBACQIJ4AABLEEwBAgngCAEgQTwAACeIJACBBPAEAJIgnAIAE8QQAkCCeAAASxBMAQIJ4AgBIEE8AAAniCQAgQTwBACSIJwCABPEEAJAgngAAEsQTAECCeAIASBBPAAAJ4gkAIEE8AQAkiCcAgATxBACQIJ4AABLEEwBAgngCAEgQTwAACeIJACBBPAEAJIgnAIAE8QQAkCCeAAASxBMAQIJ4AgBIEE8AAAniCQAgQTwBACSIJwCAhMrWHqCtKYoiIiKWLFnSypMAAKur4et2w9fxLyOeSuzdd9+NiIi6urpWngQAyFq6dGnU1tZ+6TniqcQ22WSTiIh48803v3L5bdWSJUuirq4u5s6dGzU1Na09TquxBztoYA92EGEHDdbWPRRFEUuXLo0tt9zyK88VTyXWrt1nLyOrra1dqx4UraGmpuZrv4MIe4iwgwb2YAcRdtBgbdzD6j7p4QXjAAAJ4gkAIEE8lVhVVVWMGTMmqqqqWnuUVmMHn7EHO2hgD3YQYQcN2sIeKorV+Zk8AAAiwjNPAAAp4gkAIEE8AQAkiCcAgATx1AxXXnllbL311tGhQ4cYMGBAPPLII196/vTp02PAgAHRoUOH6NmzZ/zud79roUnLJ7OD+fPnx6BBg6J3797Rrl27GDFiRMsNWmaZPdxxxx1x4IEHxuabbx41NTWx1157xV/+8pcWnLY8Mjt49NFHY5999olNN900OnbsGNtvv31cdtllLThteWQ/JzR47LHHorKyMvr161feAVtIZg/Tpk2LioqKL1xeeumlFpy49LKPhWXLlsXo0aOjR48eUVVVFdtss01ce+21LTRt+WT2MGTIkFU+Fvr06dOCEycVpNxyyy1F+/bti4kTJxazZ88uTj/99GKDDTYo3njjjVWe/+qrrxadOnUqTj/99GL27NnFxIkTi/bt2xe33XZbC09eOtkdvPbaa8Vpp51WXH/99UW/fv2K008/vWUHLpPsHk4//fRi3LhxxcyZM4s5c+YUo0aNKtq3b188/fTTLTx56WR38PTTTxeTJ08unn/++eK1114rbrzxxqJTp07FVVdd1cKTl052Bw0WLVpU9OzZszjooIOKvn37tsywZZTdw8MPP1xERPHyyy8X8+fPb7wsX768hScvneY8Fr773e8We+yxRzF16tTitddeK5544onisccea8GpSy+7h0WLFjV5DMydO7fYZJNNijFjxrTs4AniKWn33XcvTjrppCbHtt9++2LkyJGrPP/ss88utt9++ybHfvzjHxd77rln2WYst+wOPm+//fZrM/G0JntosMMOOxTnn39+qUdrMaXYweGHH14ce+yxpR6txTR3Bz/4wQ+Kc845pxgzZkybiKfsHhri6f3332+B6VpGdgf3339/UVtbW7z77rstMV6LWdPPC1OmTCkqKiqK119/vRzjlYRv2yV88skn8dRTT8VBBx3U5PhBBx0UM2bMWOV1/va3v33h/IMPPjiefPLJ+PTTT8s2a7k0ZwdtUSn2sHLlyli6dGnjH5Ne15RiB7NmzYoZM2bEfvvtV44Ry665O5g0aVK88sorMWbMmHKP2CLW5LHQv3//6Nq1axxwwAHx8MMPl3PMsmrODu6+++7YddddY/z48dGtW7fYbrvt4qyzzoqPP/64JUYui1J8Xrjmmmti4MCB0aNHj3KMWBL+MHDCwoULY8WKFdG5c+cmxzt37hwLFixY5XUWLFiwyvOXL18eCxcujK5du5Zt3nJozg7aolLs4ZJLLokPP/wwvv/975djxLJbkx107949/vOf/8Ty5ctj7NixMWzYsHKOWjbN2cE///nPGDlyZDzyyCNRWdk2PgU3Zw9du3aNq6++OgYMGBDLli2LG2+8MQ444ICYNm1a/N///V9LjF1SzdnBq6++Go8++m
h06NAhpkyZEgsXLoyTTz453nvvvXX2dU9r+rlx/vz5cf/998fkyZPLNWJJtI3/uS2soqKiydtFUXzh2Fedv6rj65LsDtqq5u7h5ptvjrFjx8Zdd90VW2yxRbnGaxHN2cEjjzwSH3zwQTz++OMxcuTI2HbbbeOHP/xhOccsq9XdwYoVK2LQoEFx/vnnx3bbbddS47WYzGOhd+/e0bt378a399prr5g7d25cfPHF62Q8NcjsYOXKlVFRURE33XRT1NbWRkTEpZdeGkceeWRcccUV0bFjx7LPWy7N/dx43XXXxUYbbRSHHXZYmSYrDfGUsNlmm8V66633hXp+5513vlDZDbp06bLK8ysrK2PTTTct26zl0pwdtEVrsodbb701hg4dGn/6059i4MCB5RyzrNZkB1tvvXVEROy0007x73//O8aOHbtOxlN2B0uXLo0nn3wyZs2aFaeeempEfPYFtCiKqKysjAceeCC+9a1vtcjspVSqzwt77rln/OEPfyj1eC2iOTvo2rVrdOvWrTGcIiK+8Y1vRFEU8dZbb0WvXr3KOnM5rMljoSiKuPbaa2Pw4MGx/vrrl3PMNeY1Twnrr79+DBgwIKZOndrk+NSpU2Pvvfde5XX22muvL5z/wAMPxK677hrt27cv26zl0pwdtEXN3cPNN98cQ4YMicmTJ8ehhx5a7jHLqlSPhaIoYtmyZaUer0Vkd1BTUxPPPfdcPPPMM42Xk046KXr37h3PPPNM7LHHHi01ekmV6rEwa9asde6lDA2as4N99tkn5s2bFx988EHjsTlz5kS7du2ie/fuZZ23XNbksTB9+vT417/+FUOHDi3niKXRKi9TX4c1/AjmNddcU8yePbsYMWJEscEGGzT+VMDIkSOLwYMHN57f8KsKzjjjjGL27NnFNddc02Z+VcHq7qAoimLWrFnFrFmzigEDBhSDBg0qZs2aVbzwwgutMX7JZPcwefLkorKysrjiiiua/FjuokWLWutDWGPZHVx++eXF3XffXcyZM6eYM2dOce211xY1NTXF6NGjW+tDWGPN+f/weW3lp+2ye7jsssuKKVOmFHPmzCmef/75YuTIkUVEFLfffntrfQhrLLuDpUuXFt27dy+OPPLI4oUXXiimT59e9OrVqxg2bFhrfQgl0dz/E8cee2yxxx57tPS4zSKemuGKK64oevToUay//vrFLrvsUkyfPr3xfccff3yx3377NTl/2rRpRf/+/Yv111+/qK+vLyZMmNDCE5dedgcR8YVLjx49WnboMsjsYb/99lvlHo4//viWH7yEMjv4zW9+U/Tp06fo1KlTUVNTU/Tv37+48sorixUrVrTC5KWT/f/weW0lnooit4dx48YV22yzTdGhQ4di4403Lvbdd9/i3nvvbYWpSyv7WHjxxReLgQMHFh07diy6d+9enHnmmcVHH33UwlOXXnYPixYtKjp27FhcffXVLTxp81QUxf9/9TIAAF/Ja54AABLEEwBAgngCAEgQTwAACeIJACBBPAEAJIgnAIAE8QQAkCCeAAASxBMAQIJ4AgBIEE8AAAn/D6Gq8x6JKPDnAAAAAElFTkSuQmCC", - "text/plain": [ - "
" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], + "outputs": [], "source": [ "#| eval: false\n", "feature_importances['Future variable importance over time'].mean().sort_values().plot(kind='barh')" @@ -2093,18 +1381,7 @@ "cell_type": "code", "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAABlUAAANVCAYAAADhqHiEAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8fJSN1AAAACXBIWXMAAA9hAAAPYQGoP6dpAABtYklEQVR4nOzdeXgV5dk/8DuEkBAWkX2RAlbFDTfwbQEtqBVFxH1psbIIVotKEbV1KQJqxWJraVVo3cAN645WcaFVrHWpS8UNtfZVRAVEcQFFIJD5/eEveQkJMAnBTODzuS4uPc+Z5Z65z3Noz9eZyUmSJAkAAAAAAADWq05NFwAAAAAAAFAbCFUAAAAAAABSEKoAAAAAAACkIFQBAAAAAABIQagCAAAAAACQglAFAAAAAAAgBaEKAAAAAABACkIVAAAAAACAFIQqAAAAAAAAKQhVAIAt0tSpUyMnJ6fCP2effXaltjVnzpwYO3ZszJ07d9MUW0vMnTs3cnJyYurUqZVed9asWZGTkxN33XXXBpcdO3Zs5OTkVKHC9e971qxZ1bbNb9u0adNi4sSJNV3GFmd9c3/w4MHRsWPHb72m2iYnJyfGjh1bI/tetmxZjB07dpPM/bTfKyXfZ5988km111AVTz/9dIwdOzY+//zzcu9NmjSpSt/v1V1H7969o3fv3t9KHQAAFalb0wUAANSkKVOmxI477lhmrG3btpXaxpw5c2LcuHHRu3fvLfpH1DZt2sQzzzwT3/3ud2u6lErZa6+94plnnomdd965pkupsmnTpsVrr70WI0eOrOlStijrm/ujR4+On//85zVTGKksW7Ysxo0bFxHhR/r/7+mnn45x48bF4MGDo0mTJmXemzRpUjRv3jwGDx5c43UAANQkoQoAsEXbddddo1u3bjVdRoWKiooiJycn6tbN9v9kW716daxatSry8/Pj+9//fk2XU2mNGzeulXVHfPOjcGFhYU2XsVmr6jmubeHi5qC2fGeycWpzAA4AbB7c/gsAYB3WdVuajh07lv6XulOnTo1jjz02IiL222+/0luIldwiZc1l17T27UtKbhVz8803x1lnnRXt2rWL/Pz8+O9//xsREX/729/igAMOiMaNG0dhYWH07Nkz/v73v6+3/o8//jjq1asXo0ePLvfem2++GTk5OfHHP/6xdNnhw4fHzjvvHA0bNoyWLVvG/vvvH08++WSZ9Upu8TVhwoS45JJLolOnTpGfnx+PP/54hbf/+u9//xtDhgyJ7bffPgoLC6Ndu3bRv3//ePXVVyusefny5TFq1Kho3bp11K9fP3r16hUvvfTSeo+zxO233x7du3ePBg0aRMOGDeOggw5KtW5Ft+kZPHhwNGzYMN5888046KCDokGDBtGmTZu47LLLIiLi2WefjX322ScaNGgQO+ywQ9x4441ltllye7mZM2fGkCFDomnTptGgQYPo379/vPPOO+VquOGGG2L33XePgoKCaNq0aRx55JHxxhtvlFmmpKZXX301+vTpE40aNYoDDjggevfuHQ8++GC89957ZW5jV2LcuHHxve99L5o2bRqNGzeOvfbaK66//vpIkqTM9jt27BiHHnpoPPzww7HXXntF/fr1Y8cdd4wbbrihXL0ffvhh/PSnP4327dtH
vXr1om3btnHMMcfERx99VLrMkiVL4uyzz45OnTpFvXr1ol27djFy5Mj46quvNtiTNOdk4sSJkZOTUzpH1vTLX/4y6tWrV+aWSmnmUMmtmP7973/HMcccE1tvvfU6w5ENzf2Kbv+Vk5MTp59+ekyZMiU6d+4c9evXj27dusWzzz4bSZLE5ZdfHp06dYqGDRvG/vvvX+GxVeW7IOL/Pue33HJLqjl2//33R/fu3aOwsDAaNWoUBx54YDzzzDMVnq/XX389fvzjH8dWW20VrVq1ipNOOim++OKLMssuWbIkTj755GjWrFk0bNgwDj744PjPf/5TYa1vv/12DBgwIFq2bBn5+fmx0047xdVXX13h8VT0nZnm+2zu3LnRokWLiPhmjpT0b83v6zR1RHzzfXrwwQdHYWFhNG/ePE499dRYunTp+huylvfffz+OOuqoaNy4cWy11Vbxk5/8JD7++OPS94cOHRpNmzaNZcuWlVt3//33j1122WW92585c2Ycfvjhsc0220RBQUFst912ccopp5SZI2PHjo1zzjknIiI6depUek5mzZoVHTt2jNdffz2eeOKJ0vE1P99p53vJHLj55ptjp512isLCwth9993jgQceSFVHRMW3//r0009j+PDh0a5du6hXr15su+22ccEFF8SKFSsqvX8AgA0RqgAAW7SSqyzW/FMZ/fr1i0svvTQiIq6++up45pln4plnnol+/fpVqZ7zzjsv5s2bF3/605/ir3/9a7Rs2TJuueWW6NOnTzRu3DhuvPHGuOOOO6Jp06Zx0EEHrffH1BYtWsShhx4aN954YxQXF5d5b8qUKVGvXr044YQTIuKbH6QiIsaMGRMPPvhgTJkyJbbddtvo3bt3hc8E+OMf/xiPPfZY/Pa3v42HHnqo3C3USsyfPz+aNWsWl112WTz88MNx9dVXR926deN73/tevPXWW+WWP//88+Odd96J6667Lq677rqYP39+9O7du8IgYk2XXnpp/PjHP46dd9457rjjjrj55ptj6dKlse+++8acOXPWu+66FBUVxVFHHRX9+vWL++67L/r27RvnnXdenH/++TFo0KA46aST4t57743OnTvH4MGD48UXXyy3jaFDh0adOnVKn3ny3HPPRe/evcs8J2D8+PExdOjQ2GWXXeKee+6JP/zhD/HKK69E9+7d4+233y6zvZUrV8Zhhx0W+++/f9x3330xbty4mDRpUvTs2TNat25d+vlb88fvuXPnximnnBJ33HFH3HPPPXHUUUfFGWecERdffHG5el9++eU466yz4swzz4z77rsvdttttxg6dGj84x//KF3mww8/jL333jvuvffeGDVqVDz00EMxceLE2GqrreKzzz6LiG+u7ujVq1fceOONMWLEiHjooYfil7/8ZUydOjUOO+ywcoHO2tKck5/85CdRr169cs94WL16ddxyyy3Rv3//aN68eUREpefQUUcdFdttt13ceeed8ac//anCGqs69x944IG47rrr4rLLLovbbrstli5dGv369YuzzjornnrqqbjqqqvimmuuiTlz5sTRRx9d5lxV9btgTWnm2LRp0+Lwww+Pxo0bx2233RbXX399fPbZZ9G7d+/45z//WW6bRx99dOywww5x9913x7nnnhvTpk2LM888s/T9JEniiCOOKA1A7r333vj+978fffv2LbetOXPmxN577x2vvfZa/O53v4sHHngg+vXrFyNGjCi9VdeaKvrOTPN91qZNm3j44Ycj4pt5WtK/khA6bR0fffRR9OrVK1577bWYNGlS3HzzzfHll1/G6aefnqofJY488sjYbrvt4q677oqxY8fG9OnT46CDDoqioqKIiPj5z38en332WUybNq3c+Xr88cfjtNNOW+/2//d//ze6d+8ekydPjkcffTQuvPDC+Ne//hX77LNP6T6GDRsWZ5xxRkRE3HPPPaXnZK+99op77703tt1229hzzz1Lx++9996IqPx8f/DBB+Oqq66K
iy66KO6+++7S0LTkM7i+OiqyfPny2G+//eKmm26KUaNGxYMPPhg/+clPYsKECXHUUUeVW35D+wcA2KAEAGALNGXKlCQiKvxTVFSUJEmSREQyZsyYcut26NAhGTRoUOnrO++8M4mI5PHHH9/gsiV69eqV9OrVq/T1448/nkRE8oMf/KDMcl999VXStGnTpH///mXGV69eney+++7J//zP/6z3OO+///4kIpJHH320dGzVqlVJ27Ztk6OPPnqd661atSopKipKDjjggOTII48sHX/33XeTiEi++93vJitXriyzTsl7U6ZMWe92V65cmWy//fbJmWeeWe7499prr6S4uLh0fO7cuUleXl4ybNiw0rExY8Yka/7P2Hnz5iV169ZNzjjjjDL7Wrp0adK6devkuOOOW2c9a+57zf4NGjQoiYjk7rvvLh0rKipKWrRokURE8u9//7t0fPHixUlubm4yatSo0rGSz9ea5y5JkuSpp55KIiK55JJLkiRJks8++yypX79+csghh5RZbt68eUl+fn4yYMCAcjXdcMMN5Y6hX79+SYcOHdZ7nEnyzeemqKgoueiii5JmzZqVOdcdOnRICgoKkvfee6907Ouvv06aNm2anHLKKaVjJ510UpKXl5fMmTNnnfsZP358UqdOneT5558vM37XXXclEZHMmDFjnetW5pwcddRRyTbbbJOsXr26dGzGjBlJRCR//etfkySp3Bwq+WxdeOGF66xvTeub+4MGDSrXk4hIWrdunXz55ZelY9OnT08iItljjz3K9GPixIlJRCSvvPJKpY+jImnn2OrVq5O2bdsmXbp0KXNely5dmrRs2TLp0aNH6VjJ+ZowYUKZfQ0fPjwpKCgo3c9DDz2URETyhz/8ocxyv/71r8t9zx500EHJNttsk3zxxRdllj399NOTgoKC5NNPPy1zPGt/Z1ZkXd9nH3/88Tq/59PW8ctf/jLJyclJZs+eXWa5Aw88cJ2fjTWVnMM1vw+TJEluvfXWJCKSW265pXSsV69eyR577FFmuZ/97GdJ48aNk6VLl653P2sqLi5OioqKkvfeey+JiOS+++4rfe/yyy9PIiJ59913y623yy67lPl7q0Rl5ntEJK1atUqWLFlSOrZw4cKkTp06yfjx41PVsfbfn3/605+SiEjuuOOOMsv95je/Kff3X9r9AwCsjytVAIAt2k033RTPP/98mT81eT/+o48+uszrp59+Oj799NMYNGhQmatpiouL4+CDD47nn39+vbdT6tu3b7Ru3TqmTJlSOvbII4/E/Pnz46STTiqz7J/+9KfYa6+9oqCgIOrWrRt5eXnx97//vdxtqCIiDjvssMjLy9vg8axatSouvfTS2HnnnaNevXpRt27dqFevXrz99tsVbnfAgAFlbl3VoUOH6NGjRzz++OPr3McjjzwSq1atioEDB5Y5RwUFBdGrV68Kr7RJIycnJw455JDS13Xr1o3tttsu2rRpE3vuuWfpeNOmTaNly5bx3nvvldtGyZVAJXr06BEdOnQoPZ5nnnkmvv7663K3iGvfvn3sv//+FV59sPZnZEMee+yx+OEPfxhbbbVV5ObmRl5eXlx44YWxePHiWLRoUZll99hjj/jOd75T+rqgoCB22GGHMsf20EMPxX777Rc77bTTOvf5wAMPxK677hp77LFHmZ4cdNBB5W61trbKnJMhQ4bEBx98EH/7299Kx6ZMmRKtW7cuvQqiKnOosue4Mvbbb79o0KBB6euS89i3b98yn/2S8ZJzv7HfBSU2NMfeeuutmD9/fpx44olRp87//d/Fhg0bxtFHHx3PPvtsuVtQHXbYYWVe77bbbrF8+fLSz1fJtteeDwMGDCjzevny5fH3v/89jjzyyCgsLCxznIccckgsX748nn322TLrrKtXlfk+W1tl6nj88cdjl112id133329x7Yha5+b4447LurWrVvmu+/nP/95zJ49O5566qmI+OaWWzfffHMMGjQoGjZsuN7t
L1q0KE499dRo37596fno0KFDRESqc7I+lZ3v++23XzRq1Kj0datWrdb5HZrGY489Fg0aNIhjjjmmzHjJd8ja36PVvX8AYMvjCX4AwBZtp512ytSD6tu0aVPmdckzKtb+sWhNn376aZkfaddUt27dOPHEE+PKK6+Mzz//PJo0aRJTp06NNm3axEEHHVS63BVXXBFnnXVWnHrqqXHxxRdH8+bNIzc3N0aPHl3hD25r17kuo0aNiquvvjp++ctfRq9evWLrrbeOOnXqxLBhw+Lrr78ut3zr1q0rHHv55ZfXuY+Sc7T33ntX+P6aPwxXRmFhYRQUFJQZq1evXjRt2rTcsvXq1Yvly5eXG1/X8SxevDgiovSfFZ3Ptm3bxsyZM8vV1Lhx49TH8Nxzz0WfPn2id+/ece2118Y222wT9erVi+nTp8evf/3rcj1o1qxZuW3k5+eXWe7jjz+ObbbZZr37/eijj+K///3vOoO3NZ/jsLbKnJO+fftGmzZtYsqUKdGnT5/47LPP4v7774+f//znkZubW1pLROXmUNrPd1Ws/fmpV6/eesdLPlcb+11QYkNzbEPnv7i4OD777LMoLCwsHV/7c5Ofnx8RUfq5Wbx4cdStW7fccmvXsnjx4li1alVceeWVceWVV1ZY/9qfnYrqrOz32doqU8fixYujU6dO5d6v6Dyvz9rLl5yvkn5ERBx++OHRsWPHuPrqq6Nnz54xderU+OqrrzZ466/i4uLo06dPzJ8/P0aPHh1dunSJBg0aRHFxcXz/+9+v8Lu4Mio739N8z1TG4sWLo3Xr1mXCwoiIli1bRt26dcucw02xfwBgyyNUAQBYh/z8/HIPuY2Icj/QrE9BQUGF2/jkk09Kn/ewprV/FCpZ5sorr4zvf//7Fe6jVatW661hyJAhcfnll8df/vKXOP744+P++++PkSNHlv7oHPHNsxp69+4dkydPLrPuuh62vHad63LLLbfEwIEDS589UeKTTz6JJk2alFt+4cKFFY5V9CNYiZJzdNddd5X+l9dZsa7j2W677SLi/37cW7BgQbnl5s+fX+4zkva8l/jLX/4SeXl58cADD5QJiKZPn16p7aypRYsW8cEHH6x3mebNm0f9+vUrfMh9yfvrUplzkpubGyeeeGL88Y9/jM8//zymTZsWK1asiCFDhpTbV2XmUGXP87ehOr4LIjY8xzZ0/uvUqRNbb7116rpLtrlq1apYvHhxmbm8di1bb711aU/XFRSsHWBU1KvKfp+trTJ1NGvWbJ3ntDIWLlwY7dq1K31d0fmqU6dOnHbaaXH++efH7373u5g0aVIccMAB0blz5/Vu+7XXXouXX345pk6dGoMGDSod/+9//1upGtdlY+Z7dWjWrFn861//iiRJynweFi1aFKtWrdrk+wcAtjxCFQCAdejYsWO88sorZcYee+yx+PLLL8uMrf1fZW9oG//5z3/irbfeSvVDT8+ePaNJkyYxZ86cSj/4uMROO+0U3/ve92LKlCmxevXqcj86R3zzw2TJcZR45ZVX4plnnon27dtXab/r2u6DDz4YH374YWmwsKbbbrstRo0aVfrD2HvvvRdPP/10DBw4cJ37OOigg6Ju3brxv//7v5v0tk1Vceutt5ap6emnn4733nsvhg0bFhER3bt3j/r168ctt9wSxx57bOlyH3zwQTz22GPrvSphTev6r6xzcnKibt26ZQK0r7/+Om6++eaqHlL07ds3br755njrrbfW+WPuoYceGpdeemk0a9aswv+Kf30qe06GDBkSEyZMiNtuuy2mTp0a3bt3jx133LH0/eqYQ+uyvrlf3arrODY0xzp37hzt2rWLadOmxdlnn1263FdffRV33313dO/evcxVKmnst99+MWHChLj11ltjxIgRpeNrP3S9sLAw9ttvv3jppZdit912K71ap7LSfp+tq3+VqaPk2F5++eUytwBb+9g25NZbb42uXbuWvr7jjjti1apV0bt37zLLDRs2LMaO
HRsnnHBCvPXWW/Gb3/xmg9su6eHa5+TPf/5zuWXX95le1/fMxsz3danM3DrggAPijjvuiOnTp8eRRx5ZOn7TTTeVvg8AUJ2EKgAA63DiiSfG6NGj48ILL4xevXrFnDlz4qqrroqtttqqzHK77rprRERcc8010ahRoygoKIhOnTpFs2bN4sQTT4yf/OQnMXz48Dj66KPjvffeiwkTJkSLFi1S1dCwYcO48sorY9CgQfHpp5/GMcccEy1btoyPP/44Xn755fj444/L/dfYFTnppJPilFNOifnz50ePHj3K/Rh+6KGHxsUXXxxjxoyJXr16xVtvvRUXXXRRdOrUKVatWpXyjJV36KGHxtSpU2PHHXeM3XbbLV588cW4/PLL13n7qEWLFsWRRx4ZJ598cnzxxRcxZsyYKCgoiPPOO2+d++jYsWNcdNFFccEFF8Q777wTBx98cGy99dbx0UcfxXPPPRcNGjSIcePGVfkYNsYLL7wQw4YNi2OPPTbef//9uOCCC6Jdu3YxfPjwiIho0qRJjB49Os4///wYOHBg/PjHP47FixfHuHHjoqCgIMaMGZNqP126dIl77rknJk+eHF27do06depEt27dol+/fnHFFVfEgAED4qc//WksXrw4fvvb35b7cbUyLrroonjooYfiBz/4QZx//vnRpUuX+Pzzz+Phhx+OUaNGxY477hgjR46Mu+++O37wgx/EmWeeGbvttlsUFxfHvHnz4tFHH42zzjorvve971W4/cqekx133DG6d+8e48ePj/fffz+uueaaMu9X1xyqyPrmfnWrruPY0ByrU6dOTJgwIU444YQ49NBD45RTTokVK1bE5ZdfHp9//nlcdtllla69T58+8YMf/CB+8YtfxFdffRXdunWLp556qsJw7w9/+EPss88+se+++8bPfvaz6NixYyxdujT++9//xl//+td47LHHNri/tN9njRo1ig4dOsR9990XBxxwQDRt2jSaN28eHTt2TF3HyJEj44Ybboh+/frFJZdcEq1atYpbb7013nzzzUqdo3vuuSfq1q0bBx54YLz++usxevTo2H333eO4444rs1yTJk1i4MCBMXny5OjQoUP0799/g9vecccd47vf/W6ce+65kSRJNG3aNP7617+Wu71gxDffJRHf9GHQoEGRl5cXnTt3jkaNGkWXLl3iL3/5S9x+++2x7bbbRkFBQXTp0mWj5vu6rK+OtQ0cODCuvvrqGDRoUMydOze6dOkS//znP+PSSy+NQw45JH74wx9Wat8AABtUzQ++BwCoFaZMmZJERPL888+vc5kVK1Ykv/jFL5L27dsn9evXT3r16pXMnj076dChQzJo0KAyy06cODHp1KlTkpubm0REMmXKlCRJkqS4uDiZMGFCsu222yYFBQVJt27dksceeyzp1atX0qtXr9L1H3/88SQikjvvvLPCWp544omkX79+SdOmTZO8vLykXbt2Sb9+/da5/Nq++OKLpH79+klEJNdee22Fx3r22Wcn7dq1SwoKCpK99tormT59ejJo0KCkQ4cOpcu9++67SUQkl19+ebltlLxXcuxJkiSfffZZMnTo0KRly5ZJYWFhss8++yRPPvnkOo//5ptvTkaMGJG0aNEiyc/PT/bdd9/khRdeKLOfMWPGJBX9z9jp06cn++23X9K4ceMkPz8/6dChQ3LMMcckf/vb39Z7bkr2/fjjj5eODRo0KGnQoEG5ZXv16pXssssu5cY7dOiQ9OvXr/R1yefr0UcfTU488cSkSZMmSf369ZNDDjkkefvtt8utf9111yW77bZbUq9evWSrrbZKDj/88OT1118vs8y6akqSJPn000+TY445JmnSpEmSk5NT5vzccMMNSefOnZP8/Pxk2223TcaPH59cf/31SUQk77777jqPYc1jXrNXSZIk77//fnLSSSclrVu3TvLy8pK2bdsmxx13XPLRRx+VLvPll18mv/rVr5LOnTuXHleXLl2SM888M1m4cGGF
x1HZc1LimmuuSSIiqV+/fvLFF19UuEyaOVTy2fr44483WF+Jdc39tedOkiRJRCSnnXZambF1zal1fSdU9bugMnMsSb6ZT9/73veSgoKCpEGDBskBBxyQPPXUU2WWWdf5Kvn8r/n5+vzzz5OTTjopadKkSVJYWJgceOCByZtvvplERDJmzJhy5+Skk05K2rVrl+Tl5SUtWrRIevTokVxyySUbPD9Jkv77LEmS5G9/+1uy5557Jvn5+UlElPluT1NHkiTJnDlzkgMPPDApKChImjZtmgwdOjS57777yn2vVKTkHL744otJ//79k4YNGyaNGjVKfvzjH5eZT2uaNWtWEhHJZZddtt5tV1Rjo0aNkq233jo59thjk3nz5lV4/s8777ykbdu2SZ06dcocw9y5c5M+ffokjRo1SiKizLlMO98rmgNJklT49+q66qjoO2nx4sXJqaeemrRp0yapW7du0qFDh+S8885Lli9fXma5yuwfAGBdcpIkSTZ5cgMAAFuQqVOnxpAhQ+L555+Pbt261XQ5ELNmzYr99tsv7rzzztS3lSObzjrrrJg8eXK8//77m+SqKAAA1s/tvwAAACDjnn322fjPf/4TkyZNilNOOUWgAgBQQ4QqAAAAkHHdu3ePwsLCOPTQQ+OSSy6p6XIAALZYbv8FAAAAAACQQp2aLgAAAAAAAKA2EKoAAAAAAACkIFQBAAAAAABIYYt7UH1xcXHMnz8/GjVqFDk5OTVdDgAAAAAAUIOSJImlS5dG27Zto06d9V+LssWFKvPnz4/27dvXdBkAAAAAAECGvP/++7HNNtusd5ktLlRp1KhRRHxzcho3blzD1bA+RUVF8eijj0afPn0iLy+vpssBqshchs2DuQybB3MZNg/mMmwezGXIjiVLlkT79u1L84P12eJClZJbfjVu3FioknFFRUVRWFgYjRs39hcL1GLmMmwezGXYPJjLsHkwl2HzYC5D9qR5ZIgH1QMAAAAAAKQgVAEAAAAAAEhBqAIAAAAAAJDCFvdMFQAAAAAAqIwkSWLVqlWxevXqmi6FKsrLy4vc3NyN3o5QBQAAAAAA1mHlypWxYMGCWLZsWU2XwkbIycmJbbbZJho2bLhR2xGqAAAAAABABYqLi+Pdd9+N3NzcaNu2bdSrVy9ycnJquiwqKUmS+Pjjj+ODDz6I7bfffqOuWBGqAAAAAABABVauXBnFxcXRvn37KCwsrOly2AgtWrSIuXPnRlFR0UaFKh5UDwAAAAAA61Gnjp/Sa7vqusLIJwEAAAAAACAFoQoAAAAAAEAKnqkCAAAAAACV1PHcB7+1fc29rN+3tq+xY8fGuHHjIiLi97//fYwcOTIivrl91r333htHHHHEt1ZLWr17944nnngiIiJeeuml2GOPPTbZvlypAgAAAAAAlNpll11iwYIF8dOf/nST7uf111+Po48+Ojp27Bg5OTkxceLEcsuMHz8+9t5772jUqFG0bNkyjjjiiHjrrbfKLHPPPffEc889t0lrLSFUAQAAAAAAStWtWzdat24dhYWFm3Q/y5Yti2233TYuu+yyaN26dYXLPPHEE3HaaafFs88+GzNnzoxVq1ZFnz594quvvipdpmnTptGiRYtNWmsJoQoAAAAAAGxGbrrppmjWrFmsWLGizPjRRx8dAwcOrJZ9/PKXv4wddtghCgsLY9ttt43Ro0dHUVFRmWUuueSSaNmyZTRq1CiGDRsW5557bplbc+29995x+eWXx49+9KPIz8+vcD8PP/xwDB48OHbZZZfYfffdY8qUKTFv3rx48cUXq+U4KkuoAgAAAAAAm5Fjjz02Vq9eHffff3/p2CeffBIPPPBADBkypFr20ahRo5g6dWrMmTMn/vCHP8S1114bv//970vfv/XWW+PXv/51/OY3v4kXX3wxvvOd78TkyZM3er9ffPFFRHxzdUpNEKoAAAAAAMBmpH79+jFgwICYMmVK6ditt94a22yzTfTu3bta9vGrX/0qevTo
ER07doz+/fvHWWedFXfccUfp+1deeWUMHTo0hgwZEjvssENceOGF0aVLl43aZ5IkMWrUqNhnn31i11133dhDqBKhCgAAAAAAbGZOPvnkePTRR+PDDz+MiIgpU6bE4MGDIycnp1q2f9ddd8U+++wTrVu3joYNG8bo0aNj3rx5pe+/9dZb8T//8z9l1ln7dWWdfvrp8corr8Rtt922UdvZGEIVAAAAAADYzOy5556x++67x0033RT//ve/49VXX43BgwdXy7afffbZ+NGPfhR9+/aNBx54IF566aW44IILYuXKlWWWWzvASZKkyvs844wz4v7774/HH388ttlmmypvZ2PVrbE9AwAAAAAAm8ywYcPi97//fXz44Yfxwx/+MNq3b18t233qqaeiQ4cOccEFF5SOvffee2WW6dy5czz33HNx4oknlo698MILld5XkiRxxhlnxL333huzZs2KTp06Vb3waiBUAQAAAACAzdAJJ5wQZ599dlx77bVx0003Vdt2t9tuu5g3b1785S9/ib333jsefPDBuPfee8ssc8YZZ8TJJ58c3bp1ix49esTtt98er7zySmy77baly6xcuTLmzJlT+u8ffvhhzJ49Oxo2bBjbbbddREScdtppMW3atLjvvvuiUaNGsXDhwoiI2GqrraJ+/frVdkxpCVUAAAAAAKCS5l7Wr6ZL2KDGjRvH0UcfHQ8++GAcccQR1bbdww8/PM4888w4/fTTY8WKFdGvX78YPXp0jB07tnSZE044Id555504++yzY/ny5XHcccfF4MGD47nnnitdZv78+bHnnnuWvv7tb38bv/3tb6NXr14xa9asiIiYPHlyRET07t27TA0lz4j5tglVAAAAAABgM7VgwYI44YQTIj8/f6O2s/bzUCZMmBATJkwoMzZy5Mgyr0ePHh2jR48ufX3ggQeWXoESEdGxY8cNPmdlY57Dsil4UD0AAAAAAGxmPv300/jLX/4Sjz32WJx22mmVWvfVV1+Nhg0bxqRJk6q8/2XLlsUVV1wRr7/+erz55psxZsyY+Nvf/haDBg2q8jbXpW/fvrHLLrtU+3Yr4koVAAAAAADYzOy1117x2WefxW9+85vo3Llz6fguu+xS7qHyJf785z/HiBEj4ic/+UlERLRo0aLK+8/JyYkZM2bEJZdcEitWrIjOnTvH3XffHT/84Q+rvM11ue666+Lrr7+OiIjvfOc71b79NQlVAAAAAABgMzN37twKx2fMmBFFRUUVvteqVato1KhRNG3adKP3X79+/fjb3/620dtJo127dt/KfiKEKgAAAAAAsMXo0KFDTZdQq3mmCgAAAAAAQApCFQAAAAAAgBSEKgAAAAAAACkIVQAAAAAAAFKo0VDlH//4R/Tv3z/atm0bOTk5MX369A2u88QTT0TXrl2joKAgtt122/jTn/606QsFAAAAAAC2eDUaqnz11Vex++67x1VXXZVq+XfffTcOOeSQ2HfffeOll16K888/P0aMGBF33333Jq4UAAAAAACoirFjx8Yee+xR02VUi7o1ufO+fftG3759Uy//pz/9Kb7zne/ExIkTIyJip512ihdeeCF++9vfxtFHH13hOitWrIgVK1aUvl6yZElERBQVFUVRUVHVi2eTK+mPPkHtZi7D5sFchs2DuQybB3MZNg/mcu1QVFQUSZJEcXFxFBcXl3mvzkVbf2t1FF/42be2r42Vm5sbd999dxxxxBGlY0mSRESUO4ffpuLi4kiSJIqKiiI3N7fMe5WZhzUaqlTWM888E3369CkzdtBBB8X1118fRUVFkZeXV26d8ePHx7hx48qNP/roo1FYWLjJaqX6zJw5s6ZLAKqBuQybB3MZNg/mMmwezGXYPJjL2Va3bt1o3bp1fPnll7Fy5coy7zX5FusouVigtvj666/L1LxixYpYvXp1jR7HypUr4+uvv45//OMfsWrVqjLvLVu2LPV2alWosnDhwmjVqlWZsVatWsWqVavik08+iTZt
2pRb57zzzotRo0aVvl6yZEm0b98++vTpE40bN97kNdc647ep6QpKFdUpiJld/hgHvjoi8oqX13Q5/+e8D2q6go2ToR5nVm3vcUSm+mwubyIZ6nFm1fYeR2Sqz+byJpKhHmdWbe9xRKb6bC5vIhnqcWbV9h5HZKrP5vImkqEeZ1Zt73FEpvqcybmsx+Usb7BNvN90fDRcXicKVuVU67Yro/GyuZVeZ/9jhsWuO24XuXVy46a7/hr18vJi3DnD44Sf/SLOOOOMuPvuu6Nly5bxxz/+sfSOUk888UT88pe/jJdffjmaNm0aAwcOjIsvvjjq1v0mSth///2jS5cuUVBQENdff33Uq1cvTjnllBgzZkxERGy77bYREfGTn/wkIiI6dOgQ77zzTuTn50dubm7cd999MWbMmPjss8/i4IMPjmuuuSYaNWpUDWdow5YvXx7169ePH/zgB1FQUFDmvcqEPbUqVImIyMkp+8EtuWxo7fES+fn5kZ+fX248Ly+vwitbtnhZ+QJfQ17x8uz8xRIRUds/N1k6l1lV23sckck+m8vVLEvnMqtqe48jMtlnc7maZelcZlVt73FEJvtsLlezLJ3LrKrtPY7IZJ/N5WqWpXOZVbW9xxGZ7HOm5rIel7M6WRE5kUSdSGr0AeV1IqnSejfd+UD84meD4rkHbo7b7380Tjvv0rh/1otx5JFHxgUXXBC///3vY9CgQTFv3rz47LPP4tBDD43BgwfHTTfdFG+++WacfPLJUb9+/Rg7duz/bfOmm2LUqFHxr3/9K5555pkYPHhw7LPPPnHggQfG888/Hy1btowpU6bEwQcfHLm5uVGnTp3IycmJ//3f/437778/Hnjggfjss8/iuOOOiwkTJsSvf/3rajpL61dSR0XZQGWyghp9UH1ltW7dOhYuXFhmbNGiRVG3bt1o1qxZDVUFAAAAAADZs/vO28evRg6L7bf9Tpx3xpCoX5AfzZs3j5NPPjm23377uPDCC2Px4sXxyiuvxKRJk6J9+/Zx1VVXxY477hhHHHFEjBs3Ln73u9+VeRbKbrvtFmPGjIntt98+Bg4cGN26dYu///3vERHRokWLiIho0qRJtG7duvR1xDfPNJk6dWrsuuuuse+++8aJJ55Yul5tUqtCle7du5e7x+Cjjz4a3bp1c9UJAAAAAACsYbedti/999zc3Gi29VbRpUuX0rGSx20sWrQo3njjjejevXuZu0L17Nkzvvzyy/jgg/+7Ndxuu+1WZh9t2rSJRYsWbbCWjh07lrnVV9r1sqZGQ5Uvv/wyZs+eHbNnz46IiHfffTdmz54d8+bNi4hvnocycODA0uVPPfXUeO+992LUqFHxxhtvxA033BDXX399nH322TVRPgAAAAAAZFZe3bJPACm5/dWaryO+uYokSZJUj99Y+wKHnJycMleyrLOWKq6XNTUaqrzwwgux5557xp577hkREaNGjYo999wzLrzwwoiIWLBgQWnAEhHRqVOnmDFjRsyaNSv22GOPuPjii+OPf/xjHH300TVSPwAAAAAAbA523nnnePrpp0uDlIiIp59+Oho1ahTt2rVLvZ28vLxYvXr1pigxE2r0QfW9e/cu06C1TZ06tdxYr1694t///vcmrAoAAAAAALYsw4cPj4kTJ8YZZ5wRp59+erz11lsxZsyYGDVqVNSpk/76jI4dO8bf//736NmzZ+Tn58fWW2+9Cav+9tVoqAIAAAAAALXST2fVdAXVql27djFjxow455xzYvfdd4+mTZvG0KFD41e/+lWltvO73/0uRo0aFddee220a9cu5s6du2kKriFCFQAAAAAA2MzMuuvacmNz//VgRNs9y4yteTepXr16xXPPPbfubc6aVW5s+vTpZV73798/+vfvX2Zs7NixMXbs2DJjI0eOjJEjR65zX1lVo89UAQAAAAAAqC2EKgAAAAAAACkIVQAAAAAAAFIQqgAAAAAAAKQgVAEAAAAAgIr8/4e4r/Esd2qppJqa
KFQBAAAAAIAK5K34NGL1ylhWVNOVsLFWrlwZERG5ubkbtZ261VEMAAAAAABsbnJXLYsm7z0Ui+odExFNojAvIienpqvaSMuX13QF37ri4uL4+OOPo7CwMOrW3bhYRKgCAAAAAADr0PrtaRERsahD34jcejVcTTX46t2arqBG1KlTJ77zne9EzkamYkIVAAAAAABYh5xIos3bt0bLd+6JooJmtf9SldNfqOkKakS9evWiTp2NfyKKUAUAAAAAADYgd/XXkfvVBzVdxsYrKKjpCmo1D6oHAAAAAABIQagCAAAAAACQglAFAAAAAAAgBaEKAAAAAABACkIVAAAAAACAFIQqAAAAAAAAKQhVAAAAAAAAUhCqAAAAAAAApCBUAQAAAAAASEGoAgAAAAAAkIJQBQAAAAAAIAWhCgAAAAAAQApCFQAAAAAAgBSEKgAAAAAAACkIVQAAAAAAAFIQqgAAAAAAAKQgVAEAAAAAAEhBqAIAAAAAAJCCUAUAAAAAACAFoQoAAAAAAEAKQhUAAAAAAIAUhCoAAAAAAAApCFUAAAAAAABSEKoAAAAAAACkIFQBAAAAAABIQagCAAAAAACQglAFAAAAAAAgBaEKAAAAAABACkIVAAAAAACAFIQqAAAAAAAAKQhVAAAAAAAAUhCqAAAAAAAApCBUAQAAAAAASEGoAgAAAAAAkIJQBQAAAAAAIAWhCgAAAAAAQApCFQAAAAAAgBSEKgAAAAAAACkIVQAAAAAAAFIQqgAAAAAAAKQgVAEAAAAAAEhBqAIAAAAAAJCCUAUAAAAAACAFoQoAAAAAAEAKQhUAAAAAAIAUhCoAAAAAAAApCFUAAAAAAABSEKoAAAAAAACkIFQBAAAAAABIQagCAAAAAACQglAFAAAAAAAgBaEKAAAAAABACkIVAAAAAACAFIQqAAAAAAAAKQhVAAAAAAAAUhCqAAAAAAAApCBUAQAAAAAASEGoAgAAAAAAkIJQBQAAAAAAIAWhCgAAAAAAQApCFQAAAAAAgBSEKgAAAAAAACkIVQAAAAAAAFIQqgAAAAAAAKQgVAEAAAAAAEhBqAIAAAAAAJCCUAUAAAAAACAFoQoAAAAAAEAKQhUAAAAAAIAUhCoAAAAAAAApCFUAAAAAAABSEKoAAAAAAACkIFQBAAAAAABIQagCAAAAAACQglAFAAAAAAAgBaEKAAAAAABACkIVAAAAAACAFIQqAAAAAAAAKQhVAAAAAAAAUhCqAAAAAAAApCBUAQAAAAAASEGoAgAAAAAAkIJQBQAAAAAAIAWhCgAAAAAAQApCFQAAAAAAgBSEKgAAAAAAACkIVQAAAAAAAFIQqgAAAAAAAKQgVAEAAAAAAEhBqAIAAAAAAJCCUAUAAAAAACAFoQoAAAAAAEAKQhUAAAAAAIAUhCoAAAAAAAApCFUAAAAAAABSEKoAAAAAAACkIFQBAAAAAABIQagCAAAAAACQglAFAAAAAAAgBaEKAAAAAABACkIVAAAAAACAFIQqAAAAAAAAKQhVAAAAAAAAUhCqAAAAAAAApCBUAQAAAAAASEGoAgAAAAAAkIJQBQAAAAAAIAWhCgAAAAAAQApCFQAAAAAAgBSEKgAAAAAAACkIVQAAAAAAAFIQqgAAAAAAAKQgVAEAAAAAAEhBqAIAAAAAAJCCUAUAAAAAACAFoQoAAAAAAEAKQhUAAAAAAIAUhCoAAAAAAAApCFUAAAAAAABSEKoAAAAAAACkIFQBAAAAAABIQagCAAAAAACQglAFAAAAAAAghRoPVSZNmhSdOnWKgoKC6Nq1azz55JPrXf7WW2+N3XffPQoLC6NNmzYxZMiQWLx48bdULQAAAAAAsKWq0VDl9ttvj5EjR8YFF1wQL730Uuy7777Rt2/fmDdvXoXL//Of/4yBAwfG0KFD4/XXX48777wznn/++Rg2bNi3XDkAAAAAALClqdFQ5YorroihQ4fGsGHDYqeddoqJEydG+/btY/LkyRUu/+yz
z0bHjh1jxIgR0alTp9hnn33ilFNOiRdeeOFbrhwAAAAAANjS1K2pHa9cuTJefPHFOPfcc8uM9+nTJ55++ukK1+nRo0dccMEFMWPGjOjbt28sWrQo7rrrrujXr98697NixYpYsWJF6eslS5ZERERRUVEUFRVVw5FsZuoU1HQFpYr+fy1FGaopIiJq++cma+czi2p7jyMy1WdzeRPJ2vnMotre44hM9dlc3kSydj6zqLb3OCJTfTaXN5Gsnc8squ09jshUn83lTSRr5zOLanuPIzLV50zOZT3eMmwOfa5mlckKcpIkSTZhLes0f/78aNeuXTz11FPRo0eP0vFLL700brzxxnjrrbcqXO+uu+6KIUOGxPLly2PVqlVx2GGHxV133RV5eXkVLj927NgYN25cufFp06ZFYWFh9RwMAAAAAABQKy1btiwGDBgQX3zxRTRu3Hi9y9bYlSolcnJyyrxOkqTcWIk5c+bEiBEj4sILL4yDDjooFixYEOecc06ceuqpcf3111e4znnnnRejRo0qfb1kyZJo37599OnTZ4MnZ4s0fpuarqBUUZ2CmNnlj3HgqyMir3h5TZfzf877oKYr2DgZ6nFm1fYeR2Sqz+byJpKhHmdWbe9xRKb6bC5vIhnqcWbV9h5HZKrP5vImkqEeZ1Zt73FEpvpsLm8iGepxZtX2Hkdkqs+ZnMt6vGXYHPpczUrucJVGjYUqzZs3j9zc3Fi4cGGZ8UWLFkWrVq0qXGf8+PHRs2fPOOeccyIiYrfddosGDRrEvvvuG5dcckm0adOm3Dr5+fmRn59fbjwvL2+dV7ds0bLyBb6GvOLl2fmLJSKitn9usnQus6q29zgik302l6tZls5lVtX2Hkdkss/mcjXL0rnMqtre44hM9tlcrmZZOpdZVdt7HJHJPpvL1SxL5zKranuPIzLZ50zNZT3eMmwOfa5mlckKauxB9fXq1YuuXbvGzJkzy4zPnDmzzO3A1rRs2bKoU6dsybm5uRHxzRUuAAAAAAAAm0qNhSoREaNGjYrrrrsubrjhhnjjjTfizDPPjHnz5sWpp54aEd/cumvgwIGly/fv3z/uueeemDx5crzzzjvx1FNPxYgRI+J//ud/om3btjV1GAAAAAAAwBagRp+pcvzxx8fixYvjoosuigULFsSuu+4aM2bMiA4dOkRExIIFC2LevHmlyw8ePDiWLl0aV111VZx11lnRpEmT2H///eM3v/lNTR0CAAAAAACwhajxB9UPHz48hg8fXuF7U6dOLTd2xhlnxBlnnLGJqwIAAAAAACirRm//BQAAAAAAUFsIVQAAAAAAAFIQqgAAAAAAAKQgVAEAAAAAAEhBqAIAAAAAAJCCUAUAAAAAACAFoQoAAAAAAEAKQhUAAAAAAIAUhCoAAAAAAAApCFUAAAAAAABSEKoAAAAAAACkIFQBAAAAAABIQagCAAAAAACQglAFAAAAAAAgBaEKAAAAAABACkIVAAAAAACAFIQqAAAAAAAAKQhVAAAAAAAAUhCqAAAAAAAApCBUAQAAAAAASEGoAgAAAAAAkIJQBQAAAAAAIAWhCgAAAAAAQApCFQAAAAAAgBSEKgAAAAAAACkIVQAAAAAAAFIQqgAAAAAAAKQgVAEAAAAAAEhBqAIAAAAAAJCCUAUAAAAAACAFoQoAAAAAAEAKQhUAAAAAAIAUhCoAAAAAAAApCFUAAAAAAABSEKoAAAAAAACkIFQBAAAAAABIQagCAAAAAACQglAFAAAAAAAgBaEKAAAAAABACkIVAAAAAACAFIQqAAAAAAAAKQhVAAAAAAAAUhCqAAAAAAAApCBUAQAAAAAASEGoAgAAAAAAkIJQBQAAAAAAIAWhCgAAAAAAQApCFQAAAAAAgBSEKgAAAAAAACkIVQAAAAAAAFKoW9MFkC0dl0+r6RJK5ecmMSFWx64rro8Vq3NqupxSc2u6gI2UpR5n1dyaLqAaZKnP5vKmkaUeZ9Xcmi6gGmSpz+byppGl
HmfV3JouoBpkqc/m8qaRpR5n1dyaLqAaZKnP5vKmkaUeZ9Xcmi6gGmSpz1mcy3NruoBqkKUeZ9Xcmi6glnOlCgAAAAAAQApCFQAAAAAAgBSEKgAAAAAAACkIVQAAAAAAAFIQqgAAAAAAAKQgVAEAAAAAAEhBqAIAAAAAAJCCUAUAAAAAACAFoQoAAAAAAEAKQhUAAAAAAIAUhCoAAAAAAAApCFUAAAAAAABSEKoAAAAAAACkIFQBAAAAAABIQagCAAAAAACQglAFAAAAAAAgBaEKAAAAAABACkIVAAAAAACAFIQqAAAAAAAAKQhVAAAAAAAAUhCqAAAAAAAApCBUAQAAAAAASEGoAgAAAAAAkIJQBQAAAAAAIAWhCgAAAAAAQApCFQAAAAAAgBSEKgAAAAAAACkIVQAAAAAAAFIQqgAAAAAAAKQgVAEAAAAAAEhBqAIAAAAAAJCCUAUAAAAAACAFoQoAAAAAAEAKQhUAAAAAAIAUhCoAAAAAAAApCFUAAAAAAABSEKoAAAAAAACkIFQBAAAAAABIQagCAAAAAACQglAFAAAAAAAgBaEKAAAAAABACkIVAAAAAACAFIQqAAAAAAAAKQhVAAAAAAAAUhCqAAAAAAAApCBUAQAAAAAASEGoAgAAAAAAkIJQBQAAAAAAIAWhCgAAAAAAQApCFQAAAAAAgBSEKgAAAAAAACkIVQAAAAAAAFIQqgAAAAAAAKQgVAEAAAAAAEhBqAIAAAAAAJCCUAUAAAAAACAFoQoAAAAAAEAKQhUAAAAAAIAUhCoAAAAAAAApCFUAAAAAAABSEKoAAAAAAACkIFQBAAAAAABIQagCAAAAAACQglAFAAAAAAAgBaEKAAAAAABACkIVAAAAAACAFIQqAAAAAAAAKQhVAAAAAAAAUhCqAAAAAAAApCBUAQAAAAAASEGoAgAAAAAAkIJQBQAAAAAAIAWhCgAAAAAAQApCFQAAAAAAgBSEKgAAAAAAACkIVQAAAAAAAFIQqgAAAAAAAKQgVAEAAAAAAEhBqAIAAAAAAJCCUAUAAAAAACAFoQoAAAAAAEAKQhUAAAAAAIAUhCoAAAAAAAApCFUAAAAAAABSEKoAAAAAAACkUOOhyqRJk6JTp05RUFAQXbt2jSeffHK9y69YsSIuuOCC6NChQ+Tn58d3v/vduOGGG76lagEAAAAAgC1V3Zrc+e233x4jR46MSZMmRc+ePePPf/5z9O3bN+bMmRPf+c53KlznuOOOi48++iiuv/762G677WLRokWxatWqb7lyAAAAAABgS1OjocoVV1wRQ4cOjWHDhkVExMSJE+ORRx6JyZMnx/jx48st//DDD8cTTzwR77zzTjRt2jQiIjp27PhtlgwAAAAAAGyhaixUWblyZbz44otx7rnnlhnv06dPPP300xWuc//990e3bt1iwoQJcfPNN0eDBg3isMMOi4svvjjq169f4TorVqyIFStWlL5esmRJREQUFRVFUVFRNR3N5iM/N6npEkrl10nK/DMravvnJks9zqra3uOIbPXZXN40stTjrKrtPY7IVp/N5U0jSz3Oqtre44hs9dlc3jSy1OOsqu09jshWn83lTSNLPc6q2t7jiGz1OYtzWY+3DJtDn6tbZc5JTpIkNfIpmz9/frRr1y6eeuqp6NGjR+n4pZdeGjfeeGO89dZb5dY5+OCDY9asWfHDH/4wLrzwwvjkk09i+PDhsf/++6/zuSpjx46NcePGlRufNm1aFBYWVt8BAQAAAAAAtc6yZctiwIAB8cUXX0Tjxo3Xu2yN3v4rIiInJ6fM6yRJyo2VKC4ujpycnLj11ltjq622iohvbiF2zDHHxNVXX13h1SrnnXdejBo1qvT1kiVLon379tGnT58Nnpwt0a5jH6npEkrl10ni4m7FMfqFOrGiuOLPRE14bexBNV3CRslSj7Oqtvc4Ilt9Npc3jSz1OKtqe48jstVnc3nTyFKPs6q29zgiW302lzeNLPU4q2p7
jyOy1WdzedPIUo+zqrb3OCJbfc7iXNbjLcPm0OfqVnKHqzRqLFRp3rx55ObmxsKFC8uML1q0KFq1alXhOm3atIl27dqVBioRETvttFMkSRIffPBBbL/99uXWyc/Pj/z8/HLjeXl5kZeXt5FHsflZsTobX+BrWlGck6m6avvnJkvnMqtqe48jstlnc7l6ZelcZlVt73FENvtsLlevLJ3LrKrtPY7IZp/N5eqVpXOZVbW9xxHZ7LO5XL2ydC6zqrb3OCKbfc7SXNbjLcPm0OfqVplzUmcT1rFe9erVi65du8bMmTPLjM+cObPM7cDW1LNnz5g/f358+eWXpWP/+c9/ok6dOrHNNtts0noBAAAAAIAtW42FKhERo0aNiuuuuy5uuOGGeOONN+LMM8+MefPmxamnnhoR39y6a+DAgaXLDxgwIJo1axZDhgyJOXPmxD/+8Y8455xz4qSTTlrng+oBAAAAAACqQ40+U+X444+PxYsXx0UXXRQLFiyIXXfdNWbMmBEdOnSIiIgFCxbEvHnzSpdv2LBhzJw5M84444zo1q1bNGvWLI477ri45JJLauoQAAAAAACALUSNP6h++PDhMXz48Arfmzp1armxHXfcsdwtwwAAAAAAADa1Gr39FwAAAAAAQG0hVAEAAAAAAEhBqAIAAAAAAJCCUAUAAAAAACAFoQoAAAAAAEAKQhUAAAAAAIAUhCoAAAAAAAApCFUAAAAAAABSEKoAAAAAAACkIFQBAAAAAABIQagCAAAAAACQglAFAAAAAAAghSqHKjfffHP07Nkz2rZtG++9915EREycODHuu+++aisOAAAAAAAgK6oUqkyePDlGjRoVhxxySHz++eexevXqiIho0qRJTJw4sTrrAwAAAAAAyIQqhSpXXnllXHvttXHBBRdEbm5u6Xi3bt3i1VdfrbbiAAAAAAAAsqJKocq7774be+65Z7nx/Pz8+Oqrrza6KAAAAAAAgKypUqjSqVOnmD17drnxhx56KHbeeeeNrQkAAAAAACBz6lZlpXPOOSdOO+20WL58eSRJEs8991zcdtttMX78+Ljuuuuqu0YAAAAAAIAaV6VQZciQIbFq1ar4xS9+EcuWLYsBAwZEu3bt4g9/+EP86Ec/qu4aAQAAAAAAalyVQpWIiJNPPjlOPvnk+OSTT6K4uDhatmxZnXUBAAAAAABkSpVClXfffTdWrVoV22+/fTRv3rx0/O233468vLzo2LFjddUHAAAAAACQCVV6UP3gwYPj6aefLjf+r3/9KwYPHryxNQEAAAAAAGROlUKVl156KXr27Flu/Pvf/37Mnj17Y2sCAAAAAADInCqFKjk5ObF06dJy41988UWsXr16o4sCAAAAAADImiqFKvvuu2+MHz++TICyevXqGD9+fOyzzz7VVhwAAAAAAEBWVOlB9RMmTIgf/OAH0blz59h3330jIuLJJ5+MJUuWxGOPPVatBQIAAAAAAGRBla5U2XnnneOVV16J4447LhYtWhRLly6NgQMHxptvvhm77rprddcIAAAAAABQ46p0pUpERNu2bePSSy+tzloAAAAAAAAyq8qhyueffx7PPfdcLFq0KIqLi8u8N3DgwI0uDAAAAAAAIEuqFKr89a9/jRNOOCG++uqraNSoUeTk5JS+l5OTI1QBAAAAAAA2O1V6pspZZ50VJ510UixdujQ+//zz+Oyzz0r/fPrpp9VdIwAAAAAAQI2rUqjy4YcfxogRI6KwsLC66wEAAAAAAMikKoUqBx10ULzwwgvVXQsAAAAAAEBmVemZKv369Ytzzjkn5syZE126dIm8vLwy7x922GHVUhwAAAAAAEBWVClUOfnkkyMi4qKLLir3Xk5OTqxevXrjqgIAAAAAAMiYKoUqxcXF1V0HAAAAAABAplXpmSoAAAAAAABbmipdqRIR8dVXX8UTTzwR8+bNi5UrV5Z5b8SIERtdGAAAAAAAQJZUKVR56aWX4pBDDolly5bFV199FU2bNo1P
PvkkCgsLo2XLlkIVAAAAAABgs1Ol23+deeaZ0b9///j000+jfv368eyzz8Z7770XXbt2jd/+9rfVXSMAAAAAAECNq1KoMnv27DjrrLMiNzc3cnNzY8WKFdG+ffuYMGFCnH/++dVdIwAAAAAAQI2rUqiSl5cXOTk5ERHRqlWrmDdvXkREbLXVVqX/DgAAAAAAsDmp0jNV9txzz3jhhRdihx12iP322y8uvPDC+OSTT+Lmm2+OLl26VHeNAAAAAAAANa5KV6pceuml0aZNm4iIuPjii6NZs2bxs5/9LBYtWhR//vOfq7VAAAAAAACALKjSlSrdunUr/fcWLVrEjBkzqq0gAAAAAACALKrSlSr7779/fP755+XGlyxZEvvvv//G1gQAAAAAAJA5VQpVZs2aFStXriw3vnz58njyySc3uigAAAAAAICsqdTtv1555ZXSf58zZ04sXLiw9PXq1avj4Ycfjnbt2lVfdQAAAAAAABlRqVBljz32iJycnMjJyanwNl/169ePK6+8stqKAwAAAAAAyIpKhSrvvvtuJEkS2267bTz33HPRokWL0vfq1asXLVu2jNzc3GovEgAAAAAAoKZVKlTp0KFDFBUVxcCBA6Np06bRoUOHTVUXAAAAAABAplT6QfV5eXlx3333bYpaAAAAAAAAMqvSoUpExBFHHBHTp0+v5lIAAAAAAACyq1K3/yqx3XbbxcUXXxxPP/10dO3aNRo0aFDm/REjRlRLcQAAAAAAAFlRpVDluuuuiyZNmsSLL74YL774Ypn3cnJyhCoAAAAAAMBmp0qhyrvvvlvddQAAAAAAAGRalZ6psqYkSSJJkuqoBQAAAAAAILOqHKrcdNNN0aVLl6hfv37Ur18/dtttt7j55purszYAAAAAAIDMqNLtv6644ooYPXp0nH766dGzZ89IkiSeeuqpOPXUU+OTTz6JM888s7rrBAAAAAAAqFFVClWuvPLKmDx5cgwcOLB07PDDD49ddtklxo4dK1QBAAAAAAA2O1W6/deCBQuiR48e5cZ79OgRCxYs2OiiAAAAAAAAsqZKocp2220Xd9xxR7nx22+/PbbffvuNLgoAAAAAACBrqnT7r3HjxsXxxx8f//jHP6Jnz56Rk5MT//znP+Pvf/97hWELAAAAAABAbVelK1WOPvro+Ne//hXNmzeP6dOnxz333BPNmzeP5557Lo488sjqrhEAAAAAAKDGVelKlYiIrl27xi233FKdtQAAAAAAAGRWlUOV1atXx7333htvvPFG5OTkxE477RSHH3541K1b5U0CAAAAAABkVpUSkNdeey0OP/zwWLhwYXTu3DkiIv7zn/9EixYt4v77748uXbpUa5EAAAAAAAA1rUrPVBk2bFjssssu8cEHH8S///3v+Pe//x3vv/9+7LbbbvHTn/60umsEAAAAAACocVW6UuXll1+OF154IbbeeuvSsa233jp+/etfx957711txQEAAAAAAGRFla5U6dy5c3z00UflxhctWhTbbbfdRhcFAAAAAACQNVUKVS699NIYMWJE3HXXXfHBBx/EBx98EHfddVeMHDkyfvOb38SSJUtK/wAAAAAAAGwOqnT7r0MPPTQiIo477rjIycmJiIgkSSIion///qWvc3JyYvXq1dVRJwAAAAAAQI2qUqjy+OOPV3cdAAAAAAAAmValUKVXr17VXQcAAAAAAECmVSlUiYhYvnx5vPLKK7Fo0aIoLi4u895hhx220YUBAAAAAABkSZVClYcffjgGDhwYn3zySbn3PEcFAAAAAADYHNWpykqnn356HHvssbFgwYIoLi4u80egAgAAAAAAbI6qFKosWrQoRo0aFa1ataruegAAAAAAADKpSqHKMcccE7NmzarmUgAAAAAAALKrSs9Uueqqq+LYY4+NJ598Mrp06RJ5eXll3h8xYkS1FAcAAAAAAJAVVQpVpk2bFo888kjUr18/Zs2aFTk5OaXv5eTkCFUAAAAAAIDNTpVClV/9
6ldx0UUXxbnnnht16lTpDmIAAAAAAAC1SpUSkZUrV8bxxx8vUAEAAAAAALYYVUpFBg0aFLfffnt11wIAAAAAAJBZVbr91+rVq2PChAnxyCOPxG677VbuQfVXXHFFtRQHAAAAAACQFVUKVV599dXYc889IyLitddeq9aCAAAAAAAAsqhKocrjjz9e3XUAAAAAAABkWqVClaOOOmqDy+Tk5MTdd99d5YIAAAAAAACyqFKhylZbbbWp6gAAAAAAAMi0SoUqU6ZM2VR1AAAAAAAAZFqdmi4AAAAAAACgNhCqAAAAAAAApCBUAQAAAAAASEGoAgAAAAAAkIJQBQAAAAAAIAWhCgAAAAAAQApCFQAAAAAAgBSEKgAAAAAAACkIVQAAAAAAAFIQqgAAAAAAAKQgVAEAAAAAAEhBqAIAAAAAAJCCUAUAAAAAACAFoQoAAAAAAEAKQhUAAAAAAIAUhCoAAAAAAAApCFUAAAAAAABSEKoAAAAAAACkIFQBAAAAAABIQagCAAAAAACQglAFAAAAAAAgBaEKAAAAAABACkIVAAAAAACAFIQqAAAAAAAAKQhVAAAAAAAAUhCqAAAAAAAApCBUAQAAAAAASEGoAgAAAAAAkIJQBQAAAAAAIAWhCgAAAAAAQApCFQAAAAAAgBSEKgAAAAAAACkIVQAAAAAAAFIQqgAAAAAAAKQgVAEAAAAAAEhBqAIAAAAAAJCCUAUAAAAAACAFoQoAAAAAAEAKQhUAAAAAAIAUhCoAAAAAAAApCFUAAAAAAABSEKoAAAAAAACkIFQBAAAAAABIQagCAAAAAACQglAFAAAAAAAgBaEKAAAAAABACjUeqkyaNCk6deoUBQUF0bVr13jyySdTrffUU09F3bp1Y4899ti0BQIAAAAAAEQNhyq33357jBw5Mi644IJ46aWXYt99942+ffvGvHnz1rveF198EQMHDowDDjjgW6oUAAAAAADY0tWtyZ1fccUVMXTo0Bg2bFhEREycODEeeeSRmDx5cowfP36d651yyikxYMCAyM3NjenTp693HytWrIgVK1aUvl6yZElERBQVFUVRUdHGH8RmJj83qekSSuXXScr8Mytq++cmSz3Oqtre44hs9dlc3jSy1OOsqu09jshWn83lTSNLPc6q2t7jiGz12VzeNLLU46yq7T2OyFafzeVNI0s9zqra3uOIbPU5i3NZj7cMm0Ofq1tlzklOkiQ18ilbuXJlFBYWxp133hlHHnlk6fjPf/7zmD17djzxxBMVrjdlypSYNGlSPPPMM3HJJZfE9OnTY/bs2evcz9ixY2PcuHHlxqdNmxaFhYUbfRwAAAAAAEDttWzZshgwYEB88cUX0bhx4/UuW2NXqnzyySexevXqaNWqVZnxVq1axcKFCytc5+23345zzz03nnzyyahbN13p5513XowaNar09ZIlS6J9+/bRp0+fDZ6cLdGuYx+p6RJK5ddJ4uJuxTH6hTqxojinpssp9drYg2q6hI2SpR5nVW3vcUS2+mwubxpZ6nFW1fYeR2Srz+byppGlHmdVbe9xRLb6bC5vGlnqcVbV9h5HZKvP5vKmkaUeZ1Vt73FEtvqcxbmsx1uGzaHP1a3kDldp1OjtvyIicnLKfmEkSVJuLCJi9erVMWDAgBg3blzssMMOqbefn58f+fn55cbz8vIiLy+v8gVv5laszsYX+JpWFOdkqq7a/rnJ0rnMqtre44hs9tlcrl5ZOpdZVdt7HJHNPpvL1StL5zKranuPI7LZZ3O5emXpXGZVbe9xRDb7bC5Xryydy6yq7T2OyGafszSX9XjLsDn0ubpV5pzUWKjSvHnzyM3NLXdVyqJFi8pdvRIRsXTp0njhhRfipZdeitNPPz0iIoqLiyNJkqhbt248+uijsf/++38rtQMAAAAAAFueOjW143r16kXXrl1j5syZZcZnzpwZPXr0KLd848aN49VXX43Zs2eX/jn11FOjc+fOMXv27Pje9773bZUOAAAA
AABsgWr09l+jRo2KE088Mbp16xbdu3ePa665JubNmxennnpqRHzzPJQPP/wwbrrppqhTp07suuuuZdZv2bJlFBQUlBsHAAAAAACobjUaqhx//PGxePHiuOiii2LBggWx6667xowZM6JDhw4REbFgwYKYN29eTZYIAAAAAAAQERl4UP3w4cNj+PDhFb43derU9a47duzYGDt2bPUXBQAAAAAAsJYae6YKAAAAAABAbSJUAQAAAAAASEGoAgAAAAAAkIJQBQAAAAAAIAWhCgAAAAAAQApCFQAAAAAAgBSEKgAAAAAAACkIVQAAAAAAAFIQqgAAAAAAAKQgVAEAAAAAAEhBqAIAAAAAAJCCUAUAAAAAACAFoQoAAAAAAEAKQhUAAAAAAIAUhCoAAAAAAAApCFUAAAAAAABSEKoAAAAAAACkIFQBAAAAAABIQagCAAAAAACQglAFAAAAAAAgBaEKAAAAAABACkIVAAAAAACAFIQqAAAAAAAAKQhVAAAAAAAAUhCqAAAAAAAApCBUAQAAAAAASEGoAgAAAAAAkIJQBQAAAAAAIAWhCgAAAAAAQApCFQAAAAAAgBSEKgAAAAAAACkIVQAAAAAAAFIQqgAAAAAAAKQgVAEAAAAAAEhBqAIAAAAAAJCCUAUAAAAAACAFoQoAAAAAAEAKQhUAAAAAAIAUhCoAAAAAAAApCFUAAAAAAABSEKoAAAAAAACkIFQBAAAAAABIQagCAAAAAACQglAFAAAAAAAgBaEKAAAAAABACkIVAAAAAACAFIQqAAAAAAAAKQhVAAAAAAAAUhCqAAAAAAAApCBUAQAAAAAASEGoAgAAAAAAkIJQBQAAAAAAIAWhCgAAAAAAQApCFQAAAAAAgBSEKgAAAAAAACkIVQAAAAAAAFIQqgAAAAAAAKQgVAEAAAAAAEhBqAIAAAAAAJCCUAUAAAAAACAFoQoAAAAAAEAKQhUAAAAAAIAUhCoAAAAAAAApCFUAAAAAAABSEKoAAAAAAACkIFQBAAAAAABIQagCAAAAAACQglAFAAAAAAAgBaEKAAAAAABACkIVAAAAAACAFIQqAAAAAAAAKQhVAAAAAAAAUhCqAAAAAAAApCBUAQAAAAAASEGoAgAAAAAAkIJQBQAAAAAAIAWhCgAAAAAAQApCFQAAAAAAgBSEKgAAAAAAACkIVQAAAAAAAFIQqgAAAAAAAKQgVAEAAAAAAEhBqAIAAAAAAJCCUAUAAAAAACAFoQoAAAAAAEAKQhUAAAAAAIAUhCoAAAAAAAApCFUAAAAAAABSEKoAAAAAAACkIFQBAAAAAABIQagCAAAAAACQglAFAAAAAAAgBaEKAAAAAABACkIVAAAAAACAFIQqAAAAAAAAKQhVAAAAAAAAUhCqAAAAAAAApCBUAQAAAAAASEGoAgAAAAAAkIJQBQAAAAAAIAWhCgAAAAAAQApCFQAAAAAAgBSEKgAAAAAAACkIVQAAAAAAAFIQqgAAAAAAAKQgVAEAAAAAAEhBqAIAAAAAAJCCUAUAAAAAACAFoQoAAAAAAEAKQhUAAAAAAIAUhCoAAAAAAAApCFUAAAAAAABSEKoAAAAAAACkIFQBAAAAAABIQagCAAAAAACQglAFAAAAAAAgBaEKAAAAAABACkIVAAAAAACAFIQqAAAAAAAAKQhVAAAAAAAAUhCqAAAAAAAApCBUAQAAAAAASEGoAgAAAAAAkIJQBQAAAAAAIAWhCgAAAAAAQApCFQAAAAAAgBSEKgAAAAAAACkIVQAAAAAAAFIQqgAAAAAAAKQgVAEAAAAAAEhBqAIAAAAAAJCCUAUAAAAAACAFoQoAAAAAAEAKQhUAAAAAAIAUhCoAAAAAAAAp1HioMmnSpOjUqVMUFBRE165d48knn1znsvfcc08ceOCB0aJFi2jcuHF07949HnnkkW+xWgAAAAAAYEtVo6HK7bffHiNHjowLLrggXnrppdh3332jb9++MW/evAqX/8c//hEHHnhg
zJgxI1588cXYb7/9on///vHSSy99y5UDAAAAAABbmhoNVa644ooYOnRoDBs2LHbaaaeYOHFitG/fPiZPnlzh8hMnToxf/OIXsffee8f2228fl156aWy//fbx17/+9VuuHAAAAAAA2NLUrakdr1y5Ml588cU499xzy4z36dMnnn766VTbKC4ujqVLl0bTpk3XucyKFStixYoVpa+XLFkSERFFRUVRVFRUhco3b/m5SU2XUCq/TlLmn1lR2z83WepxVtX2Hkdkq8/m8qaRpR5nVW3vcUS2+mwubxpZ6nFW1fYeR2Srz+byppGlHmdVbe9xRLb6bC5vGlnqcVbV9h5HZKvPWZzLerxl2Bz6XN0qc05ykiSpkU/Z/Pnzo127dvHUU09Fjx49SscvvfTSuPHGG+Ott97a4DYuv/zyuOyyy+KNN96Ili1bVrjM2LFjY9y4ceXGp02bFoWFhVU/AAAAAAAAoNZbtmxZDBgwIL744oto3LjxepetsStVSuTk5JR5nSRJubGK3HbbbTF27Ni477771hmoREScd955MWrUqNLXS5Ysifbt20efPn02eHK2RLuOfaSmSyiVXyeJi7sVx+gX6sSK4g1/Jr4tr409qKZL2ChZ6nFW1fYeR2Srz+byppGlHmdVbe9xRLb6bC5vGlnqcVbV9h5HZKvP5vKmkaUeZ1Vt73FEtvpsLm8aWepxVtX2Hkdkq89ZnMt6vGXYHPpc3UrucJVGjYUqzZs3j9zc3Fi4cGGZ8UWLFkWrVq3Wu+7tt98eQ4cOjTvvvDN++MMfrnfZ/Pz8yM/PLzeel5cXeXl5lS98M7didTa+wNe0ojgnU3XV9s9Nls5lVtX2Hkdks8/mcvXK0rnMqtre44hs9tlcrl5ZOpdZVdt7HJHNPpvL1StL5zKranuPI7LZZ3O5emXpXGZVbe9xRDb7nKW5rMdbhs2hz9WtMuekxh5UX69evejatWvMnDmzzPjMmTPL3A5sbbfddlsMHjw4pk2bFv369dvUZQIAAAAAAEREDd/+a9SoUXHiiSdGt27donv37nHNNdfEvHnz4tRTT42Ib27d9eGHH8ZNN90UEd8EKgMHDow//OEP8f3vf7/0Kpf69evHVlttVWPHAQAAAAAAbP5qNFQ5/vjjY/HixXHRRRfFggULYtddd40ZM2ZEhw4dIiJiwYIFMW/evNLl//znP8eqVavitNNOi9NOO610fNCgQTF16tRvu3wAAAAAAGALUuMPqh8+fHgMHz68wvfWDkpmzZq16QsCAAAAAACoQI09UwUAAAAAAKA2EaoAAAAAAACkIFQBAAAAAABIQagCAAAAAACQglAFAAAAAAAgBaEKAAAAAABACkIVAAAAAACAFIQqAAAAAAAAKQhVAAAAAAAAUhCqAAAAAAAApCBUAQAAAAAASEGoAgAAAAAAkIJQBQAAAAAAIAWhCgAAAAAAQApCFQAAAAAAgBSEKgAAAAAAACkIVQAAAAAAAFIQqgAAAAAAAKQgVAEAAAAAAEhBqAIAAAAAAJCCUAUAAAAAACAFoQoAAAAAAEAKQhUAAAAAAIAUhCoAAAAAAAApCFUAAAAAAABSEKoAAAAAAACkIFQBAAAAAABIQagCAAAAAACQglAFAAAAAAAgBaEKAAAAAABACkIVAAAAAACAFIQqAAAAAAAAKQhVAAAAAAAAUhCqAAAAAAAApCBUAQAAAAAASEGoAgAAAAAAkIJQBQAAAAAAIAWhCgAAAAAAQApCFQAAAAAAgBSEKgAAAAAAACkIVQAAAAAAAFIQqgAAAAAAAKQgVAEAAAAAAEhBqAIAAAAAAJCCUAUAAAAAACAFoQoAAAAAAEAKQhUAAAAAAIAUhCoAAAAAAAApCFUAAAAAAABSEKoAAAAAAACkIFQBAAAAAABIQagCAAAAAACQglAFAAAAAAAgBaEKAAAAAABACkIVAAAAAACAFIQqAAAAAAAAKQhVAAAAAAAAUhCqAAAAAAAApCBUAQAAAAAA
SEGoAgAAAAAAkIJQBQAAAAAAIAWhCgAAAAAAQApCFQAAAAAAgBSEKgAAAAAAACkIVQAAAAAAAFIQqgAAAAAAAKQgVAEAAAAAAEhBqAIAAAAAAJCCUAUAAAAAACAFoQoAAAAAAEAKQhUAAAAAAIAUhCoAAAAAAAApCFUAAAAAAABSEKoAAAAAAACkIFQBAAAAAABIQagCAAAAAACQglAFAAAAAAAgBaEKAAAAAABACkIVAAAAAACAFIQqAAAAAAAAKQhVAAAAAAAAUhCqAAAAAAAApCBUAQAAAAAASEGoAgAAAAAAkIJQBQAAAAAAIAWhCgAAAAAAQApCFQAAAAAAgBSEKgAAAAAAACkIVQAAAAAAAFIQqgAAAAAAAKQgVAEAAAAAAEhBqPL/2rvXGCnv+n3A97IUWKTLaSkHORR7CoKRuk0IRWJBhVYwmDakalMhHHRTTW1JNQUMByO8gVQEtW0stFVJig0vGu0hYKiFAtGItFFEDAgusiAFaktK5bTze/FP9y9C4aECsyzXlWzCPPOd2fvJN5/AcM/MAwAAAAAAUIBSBQAAAAAAoAClCgAAAAAAQAFKFQAAAAAAgAKUKgAAAAAAAAUoVQAAAAAAAApQqgAAAAAAABSgVAEAAAAAAChAqQIAAAAAAFCAUgUAAAAAAKAApQoAAAAAAEABShUAAAAAAIAClCoAAAAAAAAFKFUAAAAAAAAKUKoAAAAAAAAUoFQBAAAAAAAoQKkCAAAAAABQgFIFAAAAAACgAKUKAAAAAABAAUoVAAAAAACAApQqAAAAAAAABShVAAAAAAAAClCqAAAAAAAAFKBUAQAAAAAAKECpAgAAAAAAUIBSBQAAAAAAoAClCgAAAAAAQAFKFQAAAAAAgAKUKgAAAAAAAAUoVQAAAAAAAApQqgAAAAAAABSgVAEAAAAAAChAqQIAAAAAAFCAUgUAAAAAAKAApQoAAAAAAEABShUAAAAAAIAClCoAAAAAAAAFKFUAAAAAAAAKUKoAAAAAAAAUoFQBAAAAAAAoQKkCAAAAAABQgFIFAAAAAACgAKUKAAAAAABAAUoVAAAAAACAApQqAAAAAAAABZS9VPnxj3+c/v37p127dqmtrc26devOuv6VV15JbW1t2rVrl4985CN57LHHLlFSAAAAAADgSlbWUmXFihV54IEHMnPmzGzevDnDhw/PHXfckfr6+jOu37lzZz73uc9l+PDh2bx5c2bMmJH7778/K1euvMTJAQAAAACAK01ZS5VHHnkkkydPzpQpUzJgwIAsWrQoffr0yaOPPnrG9Y899lj69u2bRYsWZcCAAZkyZUomTZqUhQsXXuLkAAAAAADAlaZ1uX7xsWPHsmnTpjz88MOnHB81alQ2bNhwxsds3Lgxo0aNOuXY6NGjs3Tp0hw/fjxXXXXVaY85evRojh492nT7rbfeSpIcOnQox48f/19Po8VpfeKdckdo0rqxlCNHGtP6eKucbKwod5wmBw8eLHeE/0lz2uPm6nLf46R57bNZvjia0x43V5f7HifNa5/N8sXRnPa4ubrc9zhpXvtsli+O5rTHzdXlvsdJ89pns3xxNKc9bq4u9z1Omtc+N8dZtsdXhpawzxfa4cOHkySlUumcaytKRVZdBA0NDfnwhz+c9evX59Zbb206Pn/+/Dz99NPZtm3baY+58cYbM3HixMyYMaPp2IYNGzJs2LA0NDSkZ8+epz1mzpw5mTt37sU5CQAAAAAAoEXYvXt3evfufdY1ZfukynsqKk5tYUul0mnHzrX+TMffM3369EybNq3pdmNjYw4dOpSuXbue9fdQfm+//Xb69OmT3bt3p7q6utxxgA/ILEPLYJahZTDL0DKYZWgZzDI0H6VSKYcPH06vXr3OubZspUpNTU0qKyuzb9++U47v378/3bt3P+NjevToccb1rVu3TteuXc/4mLZt26Zt27anHOvUqdMHD84lV11d7S8WaAHMMrQMZhlaBrMMLYNZhpbBLEPz
0LFjx0Lrynah+jZt2qS2tjarV68+5fjq1atP+Tqw/zR06NDT1q9atSq33HLLGa+nAgAAAAAAcKGUrVRJkmnTpuWJJ57IsmXLsnXr1jz44IOpr69PXV1dkv/31V1f+cpXmtbX1dXl73//e6ZNm5atW7dm2bJlWbp0aR566KFynQIAAAAAAHCFKOs1Ve6+++4cPHgw3/3ud7N3794MGjQoL7zwQvr165ck2bt3b+rr65vW9+/fPy+88EIefPDB/OhHP0qvXr2yePHi3HXXXeU6BS6itm3bZvbs2ad9fRtweTHL0DKYZWgZzDK0DGYZWgazDJenitJ7V3oHAAAAAADgfZX1678AAAAAAAAuF0oVAAAAAACAApQqAAAAAAAABShVAAAAAAAAClCq0CzNmzcvt956a9q3b59OnTqddv/rr7+eL33pS+nTp0+qqqoyYMCA/OAHP7j0QYGzOtcsJ0l9fX0+//nP50Mf+lBqampy//3359ixY5c2KHBe/vrXv2bcuHGpqalJdXV1hg0blpdffrncsYAP4Pnnn8+QIUNSVVWVmpqa3HnnneWOBHxAR48ezeDBg1NRUZHXXnut3HGA87Br165Mnjw5/fv3T1VVVa677rrMnj3ba2NoplqXOwCcybFjxzJ+/PgMHTo0S5cuPe3+TZs2pVu3bvn5z3+ePn36ZMOGDfnqV7+aysrKfOMb3yhDYuBMzjXLJ0+ezJgxY9KtW7e8+uqrOXjwYCZMmJBSqZQlS5aUITFQxJgxY3LjjTdmzZo1qaqqyqJFizJ27Njs2LEjPXr0KHc8oKCVK1dm6tSpmT9/fkaOHJlSqZQ//vGP5Y4FfEDf/va306tXr7z++uvljgKcp7/85S9pbGzM448/nuuvvz5/+tOfMnXq1LzzzjtZuHBhueMB/6WiVCqVyh0C3s9TTz2VBx54IP/617/OufbrX/96tm7dmjVr1lz8YMB5eb9ZfvHFFzN27Njs3r07vXr1SpI888wzmThxYvbv35/q6uoypAXO5sCBA+nWrVvWrl2b4cOHJ0kOHz6c6urq/PrXv86nP/3pMicEijhx4kSuvfbazJ07N5MnTy53HOB/9OKLL2batGlZuXJlBg4cmM2bN2fw4MHljgX8DxYsWJBHH300f/vb38odBfgvvv6LFuOtt95Kly5dyh0DOA8bN27MoEGDmgqVJBk9enSOHj2aTZs2lTEZ8H66du2aAQMG5Kc//WneeeednDhxIo8//ni6d++e2tracscDCvrDH/6QPXv2pFWrVrn55pvTs2fP3HHHHdmyZUu5owHn6Z///GemTp2an/3sZ2nfvn254wAXiP/nguZLqUKLsHHjxvziF7/I1772tXJHAc7Dvn370r1791OOde7cOW3atMm+ffvKlAo4m4qKiqxevTqbN2/O1VdfnXbt2uX73/9+Xnrppfe9dhLQ/Lz3rtc5c+bkO9/5Tn71q1+lc+fO+dSnPpVDhw6VOR1QVKlUysSJE1NXV5dbbrml3HGAC2THjh1ZsmRJ6urqyh0FOAOlCpfMnDlzUlFRcdaf3//+9+f9vFu2bMm4ceMya9asfPazn70IyYH/dKFnuaKi4rRjpVLpjMeBi6fobJdKpdx333255pprsm7duvzud7/LuHHjMnbs2Ozdu7fcpwFXvKKz3NjYmCSZOXNm7rrrrtTW1ubJJ59MRUVFnn322TKfBVB0lpcsWZK3334706dPL3dk4Aw+yOvnhoaG3H777Rk/fnymTJlSpuTA2bimCpfMgQMHcuDAgbOuufbaa9OuXbum2+e6psqf//znjBgxIlOmTMm8efMuZFzgfVzIWZ41a1aee+65Uy6m+eabb6ZLly5Zs2ZNRowYcUGzA++v6GyvX78+o0aNyptvvnnKdY9uuOGGTJ48OQ8//PDFjgqcRdFZ3rhxY0aOHJl169blk5/8ZNN9Q4YMyWc+8xn/toYyKzrLX/ziF/PLX/7ylDcknTx5MpWVlbnnnnvy9NNP
X+yowFmc7+vnhoaGjBgxIkOGDMlTTz2VVq28Hx6ao9blDsCVo6amJjU1NRfs+bZs2ZKRI0dmwoQJXvTBJXQhZ3no0KGZN29e9u7dm549eyZJVq1albZt27o2A1xiRWf7yJEjSXLaC7xWrVo1vfMdKJ+is1xbW5u2bdtm27ZtTaXK8ePHs2vXrvTr1+9ixwTOoegsL168ON/73veabjc0NGT06NFZsWJFhgwZcjEjAgWcz+vnPXv2ZMSIEU2fHlWoQPOlVKFZqq+vz6FDh1JfX5+TJ0/mtddeS5Jcf/316dChQ7Zs2ZIRI0Zk1KhRmTZtWtO1FyorK9OtW7cyJgf+07lmedSoUfnoRz+ae++9NwsWLMihQ4fy0EMPZerUqae8Ax5oPoYOHZrOnTtnwoQJmTVrVqqqqvKTn/wkO3fuzJgxY8odDyiouro6dXV1mT17dvr06ZN+/fplwYIFSZLx48eXOR1QVN++fU+53aFDhyTJddddl969e5cjEvABNDQ05Lbbbkvfvn2zcOHCvPHGG0339ejRo4zJgDNRqtAszZo165SPKd98881Jkpdffjm33XZbnn322bzxxhtZvnx5li9f3rSuX79+2bVr16WOC7yPc81yZWVlnn/++dx3330ZNmxYqqqq8uUvfzkLFy4sV2TgHGpqavLSSy9l5syZGTlyZI4fP56BAwfmueeey8c//vFyxwPOw4IFC9K6devce++9effddzNkyJCsWbMmnTt3Lnc0ALiirFq1Ktu3b8/27dtPK0RduQGaH9dUAQAAAAAAKMCX8wEAAAAAABSgVAEAAAAAAChAqQIAAAAAAFCAUgUAAAAAAKAApQoAAAAAAEABShUAAAAAAIAClCoAAAAAAAAFKFUAAAAAAAAKUKoAAAAt1pw5czJ48OByxwAAAFqIilKpVCp3CAAAgPNVUVFx1vsnTJiQH/7whzl69Gi6du16iVIBAAAtmVIFAAC4LO3bt6/pzytWrMisWbOybdu2pmNVVVXp2LFjOaIBAAAtlK//AgAALks9evRo+unYsWMqKipOO/bfX/81ceLEfOELX8j8+fPTvXv3dOrUKXPnzs2JEyfyrW99K126dEnv3r2zbNmyU37Xnj17cvfdd6dz587p2rVrxo0bl127dl3aEwYAAMpOqQIAAFxR1qxZk4aGhqxduzaPPPJI5syZk7Fjx6Zz58757W9/m7q6utTV1WX37t1JkiNHjmTEiBHp0KFD1q5dm1dffTUdOnTI7bffnmPHjpX5bAAAgEtJqQIAAFxRunTpksWLF+emm27KpEmTctNNN+XIkSOZMWNGbrjhhkyfPj1t2rTJ+vXrkyTPPPNMWrVqlSeeeCIf+9jHMmDAgDz55JOpr6/Pb37zm/KeDAAAcEm1LncAAACAS2ngwIFp1er/v7+se/fuGTRoUNPtysrKdO3aNfv370+SbNq0Kdu3b8/VV199yvP8+9//zo4dOy5NaAAAoFlQqgAAAFeUq6666pTbFRUVZzzW2NiYJGlsbExtbW2WL19+2nN169bt4gUFAACaHaUKAADAWXziE5/IihUrcs0116S6urrccQAAgDJyTRUAAICzuOeee1JTU5Nx48Zl3bp12blzZ1555ZV885vfzD/+8Y9yxwMAAC4hpQoAAMBZtG/fPmvXrk3fvn1z5513ZsCAAZk0aVLeffddn1wBAIArTEWpVCqVOwQAAAAAAEBz55MqAAAAAAAABShVAAAAAAAAClCqAAAAAAAAFKBUAQAAAAAAKECpAgAAAAAAUIBSBQAAAAAAoAClCgAAAAAAQAFKFQAAAAAAgAKUKgAAAAAAAAUoVQAAAAAAAApQqgAAAAAAABTwf8OzHxLZK3G5AAAAAElFTkSuQmCC", - "text/plain": [ - "
" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], + "outputs": [], "source": [ "#| eval: false\n", "df=feature_importances['Future variable importance over time']\n", @@ -2141,18 +1418,7 @@ "cell_type": "code", "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAABlUAAANVCAYAAADhqHiEAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8fJSN1AAAACXBIWXMAAA9hAAAPYQGoP6dpAAB5mUlEQVR4nOzdeZxVdf0/8NcAM2yKhoogouC+54IamomWCyq2qFj4FcUts/SLmBWaCmq4VIZaam7ghlluWblRrrmkuCSFmSWKC4Y7KAIDc39/+GV+TmxnhguXYZ7Px4OH3HPP/bzfnzN3zgz35eecqlKpVAoAAAAAAACL1KrSDQAAAAAAADQHQhUAAAAAAIAChCoAAAAAAAAFCFUAAAAAAAAKEKoAAAAAAAAUIFQBAAAAAAAoQKgCAAAAAABQgFAFAAAAAACgAKEKAAAAAABAAUIVAACapTFjxqSqqqr+T5s2bbL22mtn8ODBef3118teb8aMGRk+fHgeeOCBso+9pF5++eVUVVVlzJgxjX7tAw88kKqqqtx8882L3Xf48OGpqqpqQoeLrr08HtOixo4dm1GjRlW6jRZn4sSJGT58eF5++eX5njv88MPTs2fPZd4TAAAtg1AFAIBmbfTo0Xnssccybty4HH300bnxxhuzyy675KOPPiprnRkzZmTEiBHLZQDQrVu3PPbYY9l3330r3UqjbLvttnnsscey7bbbVrqVJhOqVMbEiRMzYsSIBYYqp512Wm677bZl3xQAAC1Cm0o3AAAAS2KLLbZI7969kyS77bZb5s6dm7POOiu33357DjnkkAp3t3TNnTs3c+bMSdu2bfO5z32u0u00WqdOnZpl38knIVuHDh0q3cYKranHeP31118K3QAAwCesVAEAYIUy70P6V155JUkyYsSI7LjjjuncuXM6deqUbbfdNldddVVKpVKD1913333p27dvVltttbRv3z7rrLNODjjggMyYMSMvv/xy1lhjjfrx5l1y7PDDD19gD2+99VZqampy2mmnzffcP/7xj1RVVeWiiy6q3/e4447LZpttlpVWWildunTJ7rvvnocffrjB6+Zd4uv888/P2WefnV69eqVt27a5//77F3j5r3/9618ZPHhwNtxww3To0CHdu3dP//79M2HChAX2PHPmzAwdOjRdu3ZN+/bts+uuu+aZZ55Z/AFPctNNN6VPnz7p2LFjVlpppey1116FXrugy38dfvjhWWmllfKPf/wje+21Vzp27Jhu3brl3HPPTZI8/vjj+fznP5+OHTtmo402yjXXXNNgzHmXhRs3blwGDx6czp07p2PHjunfv39eeuml+Xq4+uqr89nPfjbt2rVL586d89WvfjXPP/98g33m9TRhwoTsueeeWXnllfPFL34xffv2zR/+8Ie88sorDS5FN0/R917Pnj2z33775e677862226b9u3bZ5NNNsnVV189X7+vv/56jjnmmPTo0SM1NTVZa621cuCBB+Y///lP/T7Tpk3Ld7/73fTq1Ss1NTXp3r17hgwZUnj11uKOyahRo1JVVZV//etf8732+9//fmpqavL222/Xb/vjH/+YL37xi+nUqVM6dOiQnXfeOX/6058avG7epeWefvrpHHjggfnMZz6z0HBkzJgxOeigg5J8EqTOO+7z3v8LuvxXVVVVvvOd72T06
NHZeOON0759+/Tu3TuPP/54SqVSfvzjH6dXr15ZaaWVsvvuuy9wbkXmAQDAik+oAgDACmXeh6HzQpCXX3453/zmN/PrX/86t956a772ta/l+OOPz1lnnVX/mpdffjn77rtvampqcvXVV+fuu+/Oueeem44dO2b27Nnp1q1b7r777iTJkUcemcceeyyPPfbYAkOTebX322+/XHPNNamrq2vw3OjRo1NTU1O/iubdd99Nkpxxxhn5wx/+kNGjR2e99dZL3759F3ipsYsuuij33XdffvKTn+Suu+7KJptsssAe3njjjay22mo599xzc/fdd+cXv/hF2rRpkx133DEvvPDCfPufcsopeemll3LllVfmyiuvzBtvvJG+ffsuMIj4tJEjR+Yb3/hGNttss/z617/Oddddl+nTp2eXXXbJxIkTF/nahamtrc3Xvva17Lvvvvntb3+bfv36ZdiwYTnllFNy2GGH5Ygjjshtt92WjTfeOIcffnieeuqp+cY48sgj06pVq/rLcz3xxBPp27dv3n///fp9zjnnnBx55JHZfPPNc+utt+bCCy/Mc889lz59+uTFF19sMN7s2bOz//77Z/fdd89vf/vbjBgxIpdcckl23nnndO3atf498dhjj9W/psh7b56//vWvOemkk3LiiSfmt7/9bbbaaqsceeSReeihh+r3ef3117P99tvntttuy9ChQ3PXXXdl1KhRWWWVVfLee+8l+WR1x6677pprrrkmJ5xwQu666658//vfz5gxY7L//vvPF+j8tyLH5H/+539SU1Mz3z185s6dm+uvvz79+/fP6quvniS5/vrrs+eee6ZTp0655ppr8utf/zqdO3fOXnvttcBA4mtf+1o22GCD/OY3v8lll122wB733XffjBw5Mknyi1/8ov64L+7yd7///e9z5ZVX5txzz82NN96Y6dOnZ999981JJ52URx55JD//+c9z+eWXZ+LEiTnggAMaHKvGzgMAgBVYCQAAmqHRo0eXkpQef/zxUm1tbWn69Oml3//+96U11lijtPLKK5fefPPN+V4zd+7cUm1tbenMM88srbbaaqW6urpSqVQq3XzzzaUkpWeffXah9d56661SktIZZ5xRqL877rijlKR077331m+bM2dOaa211iodcMABC33dnDlzSrW1taUvfvGLpa9+9av12ydNmlRKUlp//fVLs2fPbvCaec+NHj16kePOnj27tOGGG5ZOPPHE+u33339/KUlp2223rT8epVKp9PLLL5eqq6tLRx11VP22M844o/Tpf0JMnjy51KZNm9Lxxx/foNb06dNLXbt2LQ0YMGCh/Xy69v3331+/7bDDDislKd1yyy3122pra0trrLFGKUnp6aefrt/+zjvvlFq3bl0aOnRo/bZ574tPH7tSqVR65JFHSklKZ599dqlUKpXee++9Uvv27Uv77LNPg/0mT55catu2bWngwIHz9XT11VfPN4d99923tO666y5ynqXSwt97pVKptO6665batWtXeuWVV+q3ffzxx6XOnTuXvvnNb9ZvO+KII0rV1dWliRMnLrTOOeecU2rVqlXpySefbLB93nv8zjvvXOhrG3NMvva1r5XWXnvt0ty5c+u33XnnnaUkpd/97nelUqlU+uijj0qdO3cu9e/ff75j8dnPfra0ww471G+b9946/fTTF9rfp/3mN7+Z770zz2GHHTbf1yRJqWvXrqUPP/ywftvtt99eSlLaeuutG3w9Ro0aVUpSeu655xo9DwAAVnxWqgAA0Kx97nOfS3V1dVZeeeXst99+6dq1a+66666sueaaST65rNeXvvSlrLLKKmndunWqq6tz+umn55133snUqVOTJFtvvXVqampyzDHH5Jprrlns6owi+vXrl65du2b06NH12+6555688cYbOeKIIxrse9lll2XbbbdNu3bt0qZNm1RXV+dPf/rTfJehSpL9998/1dXVi60/Z86cjBw5MptttllqamrSpk2b1NTU5
MUXX1zguAMHDmxw6ap11103O+20U+6///6F1rjnnnsyZ86cDBo0KHPmzKn/065du+y6664LXGlTRFVVVfbZZ5/6x23atMkGG2yQbt26ZZtttqnf3rlz53Tp0qX+Um+f9t/309lpp52y7rrr1s/nsccey8cffzzfJdx69OiR3XfffYGrDw444IBGzaPIe2+erbfeOuuss07943bt2mWjjTZqMLe77roru+22WzbddNOF1vz973+fLbbYIltvvXWDr8lee+0136XW/ltjjsngwYPz2muv5Y9//GP9ttGjR6dr167p169fkuTRRx/Nu+++m8MOO6xBL3V1ddl7773z5JNPzndJssYe48bYbbfd0rFjx/rH845jv379Grz3522fd+ybMg8AAFZcQhUAAJq1a6+9Nk8++WSeeeaZvPHGG3nuueey8847J0meeOKJ7LnnnkmSK664Io888kiefPLJnHrqqUmSjz/+OMknN7b+4x//mC5duuTb3/521l9//ay//vq58MILm9xXmzZtcuihh+a2226rv+TUmDFj0q1bt+y11171+11wwQX51re+lR133DG33HJLHn/88Tz55JPZe++96/v7tG7duhWqP3To0Jx22mn5yle+kt/97nf5y1/+kieffDKf/exnFzhu165dF7jtnXfeWWiNeffx2H777VNdXd3gz0033dTgvhqN0aFDh7Rr167BtpqamnTu3Hm+fWtqajJz5swF9r6gbfPmM++/Czqea6211nzz7tChQzp16lR4DkXfe/Osttpq843Rtm3bBvu99dZbWXvttRdZ9z//+U+ee+65+b4eK6+8ckql0iK/Jo05Jv369Uu3bt3qQ8P33nsvd9xxRwYNGpTWrVvX95IkBx544Hz9nHfeeSmVSvWXv5un6Pu7Kf77/VNTU7PI7fPeV02ZBwAAK642lW4AAACWxKabbprevXsv8Llf/epXqa6uzu9///sGH9Lffvvt8+27yy67ZJdddsncuXMzfvz4XHzxxRkyZEjWXHPNfP3rX29Sb4MHD86Pf/zj/OpXv8rBBx+cO+64I0OGDKn/0Dn55F4Nffv2zaWXXtrgtdOnT1/gmJ/+P+oX5frrr8+gQYPq7z0xz9tvv51VV111vv3ffPPNBW5b0If988y7b8bNN9+cddddt1Bfy8rC5rPBBhsk+f8hxpQpU+bb74033qif2zxFj/s8jXnvFbXGGmvktddeW+Q+q6++etq3b7/Am9zPe35hGnNMWrdunUMPPTQXXXRR3n///YwdOzazZs3K4MGD56t18cUX53Of+9wCa85bUTZPY4/zstCUeQAAsOISqgAAsMKqqqpKmzZtGoQYH3/8ca677rqFvqZ169bZcccds8kmm+SGG27I008/na9//etp27Zt/euL2nTTTbPjjjtm9OjRmTt37nwfOs/rcd7Y8zz33HN57LHH0qNHj8K1/tuCxv3DH/6Q119/vT5Y+LQbb7wxQ4cOrf9Q+5VXXsmjjz6aQYMGLbTGXnvtlTZt2uTf//73Ur1sU1PccMMNDXp69NFH88orr+Soo45KkvTp0yft27fP9ddfn4MOOqh+v9deey333XdfDjzwwEJ1/ns1yTxNee8tTr9+/XLdddflhRdeyMYbb7zAffbbb7+MHDkyq622Wnr16tWo8Rt7TAYPHpzzzz8/N954Y8aMGZM+ffpkk002qX9+5513zqqrrpqJEyfmO9/5TqN6WZymfD821dKcBwAAzY9QBQCAFda+++6bCy64IAMHDswxxxyTd955Jz/5yU/mCxsuu+yy3Hfffdl3332zzjrrZObMmfX/p/+XvvSlJMnKK6+cddddN7/97W/zxS9+MZ07d87qq6+enj17LrKHI444It/85jfzxhtvZKeddprvw/D99tsvZ511Vs4444zsuuuueeGFF3LmmWemV69emTNnTpPnvt9++2XMmDHZZJNNstVWW+Wpp57Kj3/844VePmrq1Kn56le/mqOPPjoff
PBBzjjjjLRr1y7Dhg1baI2ePXvmzDPPzKmnnpqXXnope++9dz7zmc/kP//5T5544ol07NgxI0aMaPIclsT48eNz1FFH5aCDDsqrr76aU089Nd27d89xxx2XJFl11VVz2mmn5ZRTTsmgQYPyjW98I++8805GjBiRdu3a5YwzzihUZ8stt8ytt96aSy+9NNttt11atWqV3r17F37vNcaZZ56Zu+66K1/4whdyyimnZMstt8z777+fu+++O0OHDs0mm2ySIUOG5JZbbskXvvCFnHjiidlqq61SV1eXyZMn5957781JJ52UHXfccYHjN/aYbLLJJunTp0/OOeecvPrqq7n88ssbPL/SSivl4osvzmGHHZZ33303Bx54YLp06ZK33norf/3rX/PWW2/Nt0KrqC222CJJcvnll2fllVdOu3bt0qtXr0WurGqqpTkPAACaH6EKAAArrN133z1XX311zjvvvPTv3z/du3fP0UcfnS5duuTII4+s32/rrbfOvffemzPOOCNvvvlmVlpppWyxxRa544476u+LkSRXXXVVTj755Oy///6ZNWtWDjvssIwZM2aRPXz961/PkCFD8tprry3wg/pTTz01M2bMyFVXXZXzzz8/m222WS677LLcdtttTb7Re5JceOGFqa6uzjnnnJMPP/ww2267bW699db88Ic/XOD+I0eOzJNPPpnBgwdn2rRp2WGHHfKrX/0q66+//iLrDBs2LJtttlkuvPDC3HjjjZk1a1a6du2a7bffPscee2yT+19SV111Va677rp8/etfz6xZs7LbbrvlwgsvbHD/jGHDhqVLly656KKLctNNN6V9+/bp27dvRo4cmQ033LBQnf/93//N3//+95xyyin54IMPUiqVUiqVCr/3GqN79+554okncsYZZ+Tcc8/NO++8kzXWWCOf//zn6+fVsWPHPPzwwzn33HNz+eWXZ9KkSWnfvn3WWWedfOlLX1psCNjYYzJ48OAcc8wxad++fQ4++OD5nv+f//mfrLPOOjn//PPzzW9+M9OnT0+XLl2y9dZb5/DDD2/ScUiSXr16ZdSoUbnwwgvTt2/fzJ07N6NHj16iMRdlac0DAIDmp6pUKpUq3QQAAEA5jBkzJoMHD86TTz650HvtAAAANFWrSjcAAAAAAADQHAhVAAAAAAAACnD5LwAAAAAAgAKsVAEAAAAAAChAqAIAAAAAAFCAUAUAAAAAAKCANpVuYFmrq6vLG2+8kZVXXjlVVVWVbgcAAAAAAKigUqmU6dOnZ6211kqrVotei9LiQpU33ngjPXr0qHQbAAAAAADAcuTVV1/N2muvvch9WlyosvLKKyf55OB06tSpLGPW1tbm3nvvzZ577pnq6uqyjAlQDs5PwPLK+QlYXjk/Acsr5ydgebUinJ+mTZuWHj161OcHi9LiQpV5l/zq1KlTWUOVDh06pFOnTs32TQOsmJyfgOWV8xOwvHJ+ApZXzk/A8mpFOj8VuWWIG9UDAAAAAAAUIFQBAAAAAAAoQKgCAAAAAABQgFAFAAAAAACgAKEKAAAAAABAAUIVAAAAAACAAoQqAAAAAAAABQhVAAAAAAAAChCqAAAAAAAAFCBUAQAAAAAAKECoAgAAAAAAUIBQBQAAAAAAoAChCgAAAAAAQAFCFQAAAAAAgAKEKgAAAAAAAAUIVQAAAAAAAAoQqgAAAAAAABQgVAEAAAAAAChAqAIAAAAAAFCAUAUAAAAAAKAAoQoAAAAAAEABQhUAAAAAAIAChCoAAAAAAAAFCFUAAAAAAAAKEKoAAAAAAAAUIFQBAAAAAAAoQKgCAAAAAABQgFAFAAAAAACgAKEKAAAAAABAAUIVAAAAAACAAioaqjz00EPp379/1lprrVRVVeX2229f7GsefPDBbLfddmnXrl3WW2+9XHbZZUu/UQAAAAAAoMWraKjy0Ucf5bOf/Wx+/vOfF9p/0qRJ2WeffbLLLrvkmWeeySmnnJITTjght9xyy1LuFAAAA
AAAaOnaVLJ4v3790q9fv8L7X3bZZVlnnXUyatSoJMmmm26a8ePH5yc/+UkOOOCABb5m1qxZmTVrVv3jadOmJUlqa2tTW1vb9OY/Zd445RoPoFycn4DllfMTsLxyfgKWV85PwPJqRTg/Nab3qlKpVFqKvRRWVVWV2267LV/5ylcWus8XvvCFbLPNNrnwwgvrt912220ZMGBAZsyYkerq6vleM3z48IwYMWK+7WPHjk2HDh3K0jsAAAAAANA8zZgxIwMHDswHH3yQTp06LXLfiq5Uaaw333wza665ZoNta665ZubMmZO333473bp1m+81w4YNy9ChQ+sfT5s2LT169Miee+652INTVG1tbcaNG5c99thjgcFOc/JC7+3LMs7G459crms297otaa6NrduS5lqkbl3btpl06inp9aORafWpVXtLs2ZRzaFuS5prueq2pLlWqu6KMlfnp+Wjrrku3Zotre6KMlfnp+Wjbkuaa2PrtqS5lqvuijLXIuenxtZdXue6PNdtSXNtbN2WNNdy1W0uc12cFeHz8XlXuCqiWYUqyScrWj5t3kKb/94+T9u2bdO2bdv5tldXV5f9C7w0xlzWWi/ih3JjNOY4VKJmc6/bkuba2Lotaa6Nqdtq1qyF7ruizbWcdVvSXMtVtyXNtVJ1V7S5Oj9Vtq65Lt2aLa3uijZX56fK1m1Jc21s3ZY013LVXdHmuqjzU2PrLu9zXR7rtqS5NrZuS5prueo2l7k2Ztzm+vl4Y/qu6I3qG6tr16558803G2ybOnVq2rRpk9VWW61CXQEAAAAAAC1BswpV+vTpk3HjxjXYdu+996Z3797NNgEDAAAAAACah4qGKh9++GGeffbZPPvss0mSSZMm5dlnn83kyZOTfHI/lEGDBtXvf+yxx+aVV17J0KFD8/zzz+fqq6/OVVddle9+97uVaB8AAAAAAGhBKnpPlfHjx2e33XarfzzvhvKHHXZYxowZkylTptQHLEnSq1ev3HnnnTnxxBPzi1/8ImuttVYuuuiiHHDAAcu8dwAAAAAAoGWpaKjSt2/f+hvNL8iYMWPm27brrrvm6aefXopdAQAAAAAAzK9Z3VMFAAAAAACgUoQqAAAAAAAABQhVAAAAAAAAChCqAAAAAAAAFCBUAQAAAAAAKECoAgAAAAAAUIBQBQAAAAAAoAChCgAAAAAAQAFCFQAAAAAAgAKEKgAAAAAAAAUIVQAAAAAAAAoQqgAAAAAAABTQptINAAAALMqAYUv+z5YJZegDAADAShUAAAAAAIACrFQBAIBmyOoNAACAZc9KFQAAAAAAgAKsVAEAAAAAgEawcrzlslIFAAAAAACgAKEKAAAAAABAAUIVAAAAAACAAtxTBVhqynFtycT1JQFgeeFnOwAA0NJZqQIAAAAAAFCAlSoANAuV+L+j/R/ZAAAAAHyaUAWgGVvch/5t0yanJTl8aJvMytwF7uMDfwAAAAAoRqgCQKNYvQEAAABAS+WeKgAAAAAAAAUIVQAAAAAAAApw+S9oAVyuCQAAAABgyVmpAgAAAAAAUICVKrRYVm8AAAAAANAYQhVYxsoR5ghyAAAAAACWPaEKFWfFCAAAAAAAzYF7qgAAAAAAABQgVAEAAAAAACjA5b9owKW4AAAAAABgwaxUAQAAAAAAKECoAgAAAAAAUIBQBQAAAAAAoAChCgAAAAAAQAFCFQAAAAAAgAKEKgAAAAAAAAUIVQAAAAAAAAoQqgAAAAAAABQgVAEAAAAAAChAqAIAAAAAAFBAm0o3AAA0NGBYeX48T6hA3cbWBAAAAGhOhCoAAAAAALCcq9T/hElDQhUAgGXEaiAAAABo3txTBQAAAAAAoAArVQCAFseSaQAAAKAprFQBAAAAAAAowEoVAKBirBgBAAAAmhMrVQAAAAAAAAoQqgAAAAAAABQgVAEAAAAAAChAqAIAAAAAAFCAUAUAAAAAAKAAoQoAAAAAAEABQhUAAAAAAIAChCoAAAAAAAAFC
FUAAAAAAAAKaFPpBgAAAAAAgEWbMGlypVsgVqoAAAAAAAAUIlQBAAAAAAAoQKgCAAAAAABQgFAFAAAAAACgADeqBwAAWE4MGFaef6JNKMsoAADAf7NSBQAAAAAAoAChCgAAAAAAQAFCFQAAAAAAgAKEKgAAAAAAAAUIVQAAAAAAAAoQqgAAAAAAABQgVAEAAAAAACigTaUbAFgRDBhWntPphLKMAgAAAAAsDVaqAAAAAAAAFCBUAQAAAAAAKECoAgAAAAAAUIB7qgAAAPwX90sDAAAWxEoVAAAAAACAAoQqAAAAAAAABQhVAAAAAAAACnBPFQAAAIBmwj2fAKCyrFQBAAAAAAAoQKgCAAAAAABQgFAFAAAAAACgAKEKAAAAAABAAUIVAAAAAACAAtpUugEAAAAAWF4MGFaej8smlGUUYHk1YdLkSrdAhVipAgAAAAAAUIBQBQAAAAAAoACX/wIoA0s+AQAAAGDFZ6UKAAAAAABAAUIVAAAAAACAAoQqAAAAAAAABQhVAAAAAAAAChCqAAAAAAAAFCBUAQAAAAAAKECoAgAAAAAAUIBQBQAAAAAAoAChCgAAAAAAQAFCFQAAAAAAgAKEKgAAAAAAAAUIVQAAAAAAAAoQqgAAAAAAABQgVAEAAAAAAChAqAIAAAAAAFBAm0o3AAAAAAAATTFgWHk+4p5QllFoCaxUAQAAAAAAKECoAgAAAAAAUIBQBQAAAAAAoAChCgAAAAAAQAFCFQAAAAAAgAKEKgAAAAAAAAUIVQAAAAAAAAoQqgAAAAAAABQgVAEAAAAAAChAqAIAAAAAAFBAm0o3AAAAAAAATTFh0uRKt0ALY6UKAAAAAABAAUIVAAAAAACAAoQqAAAAAAAABQhVAAAAAAAAChCqAAAAAAAAFCBUAQAAAAAAKKBNpRsAAIDmbMCw8vxKPaEsowAAALA0WakCAAAAAABQgFAFAAAAAACgAJf/AgAAAACWGZdPBZozK1UAAAAAAAAKEKoAAAAAAAAUIFQBAAAAAAAoQKgCAAAAAABQgFAFAAAAAACgAKEKAAAAAABAAW0q3cAll1ySH//4x5kyZUo233zzjBo1KrvssstC97/hhhty/vnn58UXX8wqq6ySvffeOz/5yU+y2mqrLcOuAQAAAACYZ8Cw8nzUPKEso8DSU9FQ5aabbsqQIUNyySWXZOedd84vf/nL9OvXLxMnTsw666wz3/5//vOfM2jQoPzsZz9L//798/rrr+fYY4/NUUcdldtuu60CMwAAoKWbMGlypVsAAABgGano5b8uuOCCHHnkkTnqqKOy6aabZtSoUenRo0cuvfTSBe7/+OOPp2fPnjnhhBPSq1evfP7zn883v/nNjB8/fhl3DgAAAAAAtDQVW6kye/bsPPXUU/nBD37QYPuee+6ZRx99dIGv2WmnnXLqqafmzjvvTL9+/TJ16tTcfPPN2XfffRdaZ9asWZk1a1b942nTpiVJamtrU1tbW4aZpH6cco1XSW3TtizjNOZYVKJmc6/bkuba2Lpz21ZmrmnVrix1U+ZjXJOaBv9dcMnl/+taqbotaa7lqtuS5lqpuo2tWanz4uLq1v3f83WL2K+5zHV5/RlQrKTvnaVZsznVLcf3T3P5nm1J56fmXLclzbWxdZvzeaJSdVeUc3GR81Nj61bq6/r0K1MrUtf5aenWrdT3TqV+Hy9L3eYy18UO1/w/H29M71WlUqm0FHtZqDfeeCPdu3fPI488kp122ql++8iRI3PNNdfkhRdeWODrbr755gwePDgzZ87MnDlzsv/+++fmm29OdXX1AvcfPnx4RowYMd/2sWPHpkOHDuWZDAAAAAAA0CzNmDEjAwcOzAcffJBOnTotct+K36i+qqqqweNSqTTftnkmTpyYE044Iaeffnr22muvTJkyJSeffHKOPfbYXHXVVQt8zbBhwzJ06ND6x9OmTUuPHj2y5557LvbgFFVbW5tx48Zljz32WGi401z0GdunLOM8N
vCx5bpmc6/bkuba2Lov9N6+LDU3Hv9k415wztplqZthrzVq98Ud45rU5Purfj/nvX9eZmf2AvdpDl/XStVtSXMtV92WNNdK1W1szUqdFxdXt65t20w69ZT0+tHItPrUquKlWbOoFeVnQBG+d5ZuzeZUtxzfP83le7YlnZ+ac92WNNfG1m3O54mkebyfltdjXOT8lDRuvi3t9yfnp6Vbt1LfO5V6P5WlbiVqNqXuYqwIn4/Pu8JVERULVVZfffW0bt06b775ZoPtU6dOzZprrrnA15xzzjnZeeedc/LJJydJttpqq3Ts2DG77LJLzj777HTr1m2+17Rt2zZtF7Dcrbq6uuxf4KUx5rI2Kwv/odwYjTkOlajZ3Ou2pLk2tm7rRfxiubRqJknqZpalbpbSMZ6d2Qvdtzl8XStVtyXNtVx1W9JcK1W3sTUrdV4sWrfVrFkL3be5zHV5/xmw6JK+d5ZmzeZUtxzfP83le7YlnZ+ac92WNNfG1m3O54lK1V3RzsWLOj81tm5L+/3J+Wnp1q3U906l3k9lqdtc5lp42Ob7+Xhj+q7Yjepramqy3XbbZdy4cQ22jxs3rsHlwD5txowZadWqYcutW7dO8skKFwAAAAAAgKWlopf/Gjp0aA499ND07t07ffr0yeWXX57Jkyfn2GOPTfLJpbtef/31XHvttUmS/v375+ijj86ll15af/mvIUOGZIcddshaa61VyakAAAAAQLMyYFh5PhqcUJZRAJqHioYqBx98cN55552ceeaZmTJlSrbYYovceeedWXfddZMkU6ZMyeTJk+v3P/zwwzN9+vT8/Oc/z0knnZRVV101u+++e84777xKTQEAAAAAAGghKn6j+uOOOy7HHXfcAp8bM2bMfNuOP/74HH/88Uu5KwAAAAAAipowafLid4IVQMXuqQIAAAAAANCcCFUAAAAAAAAKEKoAAAAAAAAUUPF7qgAAQDkMGFaeX20nlGUUAAAAVkRWqgAAAAAAABQgVAEAAAAAAChAqAIAAAAAAFCAUAUAAAAAAKAAoQoAAAAAAEABQhUAAAAAAIAC2lS6AQAAAABg2ZswaXKlWwBodqxUAQAAAAAAKECoAgAAAAAAUIDLfwEAsEJw+QoAAACWNitVAAAAAAAAChCqAAAAAAAAFCBUAQAAAAAAKECoAgAAAAAAUIBQBQAAAAAAoAChCgAAAAAAQAFCFQAAAAAAgALaVLoBAAAAAIAV0YBh5fn4dUIzqQstgZUqAAAAAAAABQhVAAAAAAAAChCqAAAAAAAAFCBUAQAAAAAAKECoAgAAAAAAUIBQBQAAAAAAoAChCgAAAAAAQAFCFQAAAAAAgAKEKgAAAAAAAAW0qXQDAACseAYMW/SvmW3TJqclOXxom8zK3AXuM2Ep9AUAAMvShEmTW1RdaAmsVAEAAAAAAChAqAIAAAAAAFCAy38BK5TFXW6mKJecAQAAAAD+m5UqAAAAAAAABVipAgAAAJRNOVaPWzkOACyvrFQBAAAAAAAoQKgCAAAAAABQgFAFAAAAAACgAKEKAAAAAABAAUIVAAAAAACAAtpUugEAAFY8EyZNXuTzta3a5c5Vk8deeTXVdTOXTVMAAACwhKxUAQAAAAAAKECoAgAAAAAAUIBQBQAAAAAAoAD3VAFWKIu7hj8AAAAAQFMJVQAAAICy8T86AQArMpf/AgAAAAAAKMBKFQAAAABghTdg2JJ/FDqhDH0AzZuVKgAAAAAAAAUIVQAAAAAAAAoQqgAAAAAAABQgVAEAAAAAAChAqAIAAAAAAFCAUAUAAAAAAKAAoQoAAAAAAEABbSrdAAAAS8+AYeX5dW9CWUYBYFnyMwAAoPysVAEAAAAAACjAShUAAAAAYIU3YdLkSrcArACsVAEAAAAAAChAqAIAAAAAAFCAUAUAAAAAAKAAoQoAAAAAAEABQhUAAAAAAIAChCoAAAAAAAAFCFUAAAAAAAAKaFPpBgAAWHomTJpc6RYAqBA/AwAAy
s9KFQAAAAAAgAKEKgAAAAAAAAUIVQAAAAAAAAoQqgAAAAAAABQgVAEAAAAAAChAqAIAAAAAAFCAUAUAAAAAAKAAoQoAAAAAAEABQhUAAAAAAIAChCoAAAAAAAAFCFUAAAAAAAAKEKoAAAAAAAAUIFQBAAAAAAAoQKgCAAAAAABQgFAFAAAAAACgAKEKAAAAAABAAUIVAAAAAACAAoQqAAAAAAAABQhVAAAAAAAAChCqAAAAAAAAFCBUAQAAAAAAKECoAgAAAAAAUIBQBQAAAAAAoAChCgAAAAAAQAFCFQAAAAAAgAKEKgAAAAAAAAUIVQAAAAAAAAoQqgAAAAAAABQgVAEAAAAAAChAqAIAAAAAAFCAUAUAAAAAAKAAoQoAAAAAAEABbSrdAMCKoOfMsWUZ5+WyjAIAAAAALA1WqgAAAAAAABQgVAEAAAAAAChAqAIAAAAAAFCAUAUAAAAAAKAAN6oHAAAAmrUBw8rz8caEsowCAKzIrFQBAAAAAAAoQKgCAAAAAABQgFAFAAAAAACgAKEKAAAAAABAAUIVAAAAAACAAtpUugEAAABYkQ0YVp5/ek8oyygAACwJK1UAAAAAAAAKEKoAAAAAAAAUIFQBAAAAAAAowD1VAAAAgGZtwqTJlW4BAGghrFQBAAAAAAAoQKgCAAAAAABQgFAFAAAAAACgAKEKAAAAAABAAUIVAAAAAACAAtpUugEAAABYkU2YNLnSLQAAUCZWqgAAAAAAABQgVAEAAAAAACjA5b8AAAAAmmDAsPJ8rDKhLKMAAMuClSoAAAAAAAAFCFUAAAAAAAAKEKoAAAAAAAAUIFQBAAAAAAAowI3qAQAAAKDCBgxb9Md0bdMmpyU5fGibzMrche43ocx9AdCQUAUAAACARVrcB/5F+cAfgObO5b8AAAAAAAAKEKoAAAAAAAAUIFQBAAAAAAAoQKgCAAAAAABQgBvVAwAAADTBhEmTK90CALCMVXylyiWXXJJevXqlXbt22W677fLwww8vcv9Zs2bl1FNPzbrrrpu2bdtm/fXXz9VXX72MugUAAAAAAFqqiq5UuemmmzJkyJBccskl2XnnnfPLX/4y/fr1y8SJE7POOuss8DUDBgzIf/7zn1x11VXZYIMNMnXq1MyZM2cZdw4AAAAAALQ0FQ1VLrjgghx55JE56qijkiSjRo3KPffck0svvTTnnHPOfPvffffdefDBB/PSSy+lc+fOSZKePXsuy5YBAAAAAIAWqmKhyuzZs/PUU0/lBz/4QYPte+65Zx599NEFvuaOO+5I7969c/755+e6665Lx44ds//+++ess85K+/btF/iaWbNmZdasWfWPp02bliSpra1NbW1tWeYyb5xyjVdJbdO2LOM05lhUomZzr9uS5trouq3alaVmGjvX1qUylS3vMa5JTYP/LouaRTWHui1pruWq25LmWqm6jf59o0LnxcXVrf2/52sXtd8KMtelVdf37PJfsznVndt22R/jctRcGnXr/u/5ukXst6Kci5frui1pro2s6/y09Gsur8e4yL/vGl23BX3vlK1uS5prY+u2pLmWq25zmetih2v+n483pveqUqlUnk8CG+mNN95I9+7d88gjj2SnnXaq3z5y5Mhcc801eeGFF+Z7zd57750HHnggX/rSl3L66afn7bffznHHHZfdd999ofdVGT58eEaMGDHf9rFjx6ZDhw7lmxAAAAAAANDszJgxIwMHDswHH3yQTp06LXLfil7+K0mqqqoaPC6VSvNtm6euri5VVVW54YYbssoqqyT55BJiBx54YH7xi18scLXKsGHDMnTo0PrH06ZNS48ePbLnnnsu9uAUVVtbm3HjxmWPPfZIdXV1WcaslD5j+5RlnMcGPrZc12zudVvSXBtd95y1y1Izw15r1O5bDL+nLGX/NnyvRu2/uGNck5p8f9Xv57z3z8vszF7gPs3i61qhui1pruWq25LmWqm6ja1ZqfPi4urWtmqXcVtelD0mnJDqu
pnLpGZhzaSu79nlv2ZzqvtC7+2XuObG459c5jWXRt26tm0z6dRT0utHI9PqU1c9WJKaLe38VJa6LWmujaxbqfNEpY5xJc5Py+sxLvT7U9K4Y9yCvnfKVrclzbWxdVvSXMtVt7nMdTFWhM/H513hqoiKhSqrr756WrdunTfffLPB9qlTp2bNNddc4Gu6deuW7t271wcqSbLpppumVCrltddey4Ybbjjfa9q2bZu2C1gqWl1dXfYv8NIYc1mblQX/o6GxGnMcKlGzuddtSXNtdN1F/WLZuKKN2n3W3AWHwY0vu3SO8ezMXui+zeLrWqG6LWmu5arbkuZaqbqN/l2jQufFonWr62Yu/EOBFWyu5a7re3b5r9mc6rZeSHiwvNdcmnVbzZq10H1XtHPxclm3Jc21kXUrdZ6o1DGuxPlpeT/Gi/z96ZPCZa+5WM3k/eT8tJTrtqS5lqtuc5lr4WGb7+fjjem71VLsY5Fqamqy3XbbZdy4cQ22jxs3rsHlwD5t5513zhtvvJEPP/ywfts///nPtGrVKmuvXaaUDgAAAAAAYAEqFqokydChQ3PllVfm6quvzvPPP58TTzwxkydPzrHHHpvkk0t3DRo0qH7/gQMHZrXVVsvgwYMzceLEPPTQQzn55JNzxBFHLPRG9QAAAAAAAOVQ0XuqHHzwwXnnnXdy5plnZsqUKdliiy1y5513Zt11102STJkyJZMnT67ff6WVVsq4ceNy/PHHp3fv3llttdUyYMCAnH322ZWaAgAAAAAA0EJU/Eb1xx13XI477rgFPjdmzJj5tm2yySbzXTIMAAAAAABgaavo5b8AAAAAAACaC6EKAAAAAABAAUIVAAAAAACAAip+TxUAAAAAAFZsc1u3S2271ZOqqkXvOHNm4wZeqUfTm6p03eYy18Wora1NmzZtMnPmzMydO7esY5dTdXV1WrduvcTjCFUAAAAAAFhqPlx107y29Ukpte20+J0nTWrc4Dv/tGlNLQ91m8tcF6NUKqVr16559dVXU7W40KyCqqqqsvbaa2ellVZaonGEKgAAAAAALBVzW7fLa1uflA5demaNjq0Xu1AlXXo1rsDUj5vcW8XrNpe5LkZdXV0+/PDDrLTSSmnVavm840ipVMpbb72V1157LRtuuOESrVgRqgAAAAAAsFTUtls9pbadskbH1mlfXWAVQ7t2jSvQpkwrIypRt7nMdTHq6uoye/bstGvXbrkNVZJkjTXWyMsvv5za2tolClWW3xkCAAAAANC8/d/SlOX4qlC0EOW6NJlQBQAAAAAAoAChCgAAAAAAQAHuqQIAAAAAwDLV86I3FvLMwrYvmZdPWGupjLu0DB8+PLfffnueffbZSrfCf7FSBQAAAAAAPqVv374ZMmRIpdtgOdTkUOW6667LzjvvnLXWWiuvvPJKkmTUqFH57W9/W7bmAAAAAABgeVMqlTJnzpxKt0EFNOnyX5deemlOP/30DBkyJD/60Y8yd+7cJMmqq66aUaNG5ctf/nJZmwQAAACg5RkwbMmvXD+hDH0ALcvhhx+eBx98MA8++GAuvPDCJMno0aMzePDg3H333Tn11FPz3HN/zT03/CJ9d+qdH196TS677pZMmfp2Nuq1Tk4bcnQO3O9LSZIHHh2f3Q46Jn/81aX5/siLMvGfk7L15htl9AXDs/Fa29TXPPfcc/Ozn/0sM2bMyIABA7LGGmtUZO4sXpNWqlx88cW54oorcuqpp6Z169b123v37p0JE/yoAgAAAACgebrwwgvTp0+fHH300ZkyZUqmTJmSHj16JEm+973v5ZxzzsnzD9ySrTbdMD887xcZfdMdufScYfn7fb/JiUcfkv854Yd58LGnGox56nm/yE9PH5rxd12fNm1a54iTRtQ/9+tf/zpnnHFGfvSjH2X8+PHp1q1bLrnkkmU6Z4prUtw/adKkbLPNNvNtb9u2bT766KMlbgoAAAAAACphlVVWSU1NTTp06JCuXbsmSf7xj38kSc488
8zsscceyRvP5KMZH+eCK27IfTddlj69P5skWW/dtfPnJ5/NL6+/Jbv22a5+zB99/9v1j3/w7cHZd9AJmTlzZtq1a5dRo0bliCOOyFFHHZUkOfvss/PHP/4xM2fOXJbTpqAmhSq9evXKs88+m3XXXbfB9rvuuiubbbZZWRoDAAAAAIDlSe/evev/PvGfL2XmzFnZ4xvHNdhndm1tttlikwbbttpso/q/d1tz9STJ1KlTs8466+T555/Pscce22D/Pn365P777y93+5RBk0KVk08+Od/+9rczc+bMlEqlPPHEE7nxxhtzzjnn5Morryx3jwAAAAAAUHEdO3as/3tdXV2S5A/XXpTuXRveA6VtTU2Dx9Vt/v9H8VWpavB6mpcmhSqDBw/OnDlz8r3vfS8zZszIwIED071791x44YX5+te/Xu4eAQAAAABgmampqcncuXMXuc9mG62Xtm1rMvn1KQ0u9dVYm266aR5//PEMGjSoftvjjz/e5PFYupoUqiTJ0UcfnaOPPjpvv/126urq0qVLl3L2BQAAAAAAFdGzZ8/85S9/ycsvv5yVVlppgatKVl6pY777zUNz4vALUldXyud32DrTPvwoj47/a1bq0CGHDehfqNb//u//5rDDDkvv3r3z+c9/PjfccEP+/ve/Z7311iv3tCiDJt+ofs6cOdlwww2z+uqr129/8cUXU11dnZ49e5arPwAAAAAAVjAvn7DWgp9Ya5vGDfTGM0vezAJ897vfzWGHHZbNNtssH3/8cUaPHr3A/c763nHpsnrnnPPz0Xlp8mtZtdPK2XbLTXLK8UcUrnXwwQfn3//+d77//e9n5syZOeCAA/Ktb30r99xzT7mmQxk1KVQ5/PDDc8QRR2TDDTdssP0vf/lLrrzyyjzwwAPl6A0AAAAAAJa5jTbaKI899liDbYcffvh8+1VVVeWEI7+RE478xgLH6btT75Ref7rBtq232PiTbWv1rN92yimn5JRTTmmw33nnnde05lmqWjXlRc8880x23nnn+bZ/7nOfy7PPPrukPQEAAAAAACx3mhSqVFVVZfr06fNt/+CDDxZ78x4AAAAAAIDmqEmhyi677JJzzjmnQYAyd+7cnHPOOfn85z9ftuYAAAAAAACWF026p8r555+fL3zhC9l4442zyy67JEkefvjhTJs2Lffdd19ZGwQAAAAAAFgeNGmlymabbZbnnnsuAwYMyNSpUzN9+vQMGjQo//jHP7LFFluUu0cAAAAAAICKa9JKlSRZa621MnLkyHL2AgAAAAAAsNxqcqjy/vvv54knnsjUqVNTV1fX4LlBgwYtcWMAAAAAAADLkyaFKr/73e9yyCGH5KOPPsrKK6+cqqqq+ueqqqqEKgAAAAAAwAqnSfdUOemkk3LEEUdk+vTpef/99/Pee+/V/3n33XfL3SMAAAAAAEDFNWmlyuuvv54TTjghHTp0KHc/AAAAAACs6C7vu2zrHfPAMikz/KeXZcQFlydJfjb8pAw5+pAkSVX3bXPbVT/NV/bebZn00Rh9+/bNgw8+mCR55plnsvXWW1e2oeVck1aq7LXXXhk/fny5ewEAAAAAgGZt843Xz5Rn7s0x//O1pVrn7y/8Owcc/d303HHfVHXfNqOuuGG+fc65+Opsv8//ZOWNPp8uW30xXzliaF544YUG+9x666154oknlmqvK5ImrVTZd999c/LJJ2fixInZcsstU11d3eD5/fffvyzNAQAAAABAc9Kmdet07bL6Uq8z4+OZWW+d7jlovz1y4vCfLnCfBx9/Kt8+bEC233rzzJkzN6ee9/PsueeemThxYjp27Jgk6dy5c6ZNm7bU+11RNClUOfroo5MkZ5555nzPVVVVZe7cuUvWFQAAAAAAVMC1116bE088MW+88Ubatm1bv/2AAw5Ix44dc+211y5xje//6MLcNu7RvPbaa+natWsOOeSQnH766Q0WMJx99tm56KKL8vHHH+fggw/O6u3m5u77H82z436VJ
Nl+682z/dabJ0l+MPKiBda5+4ZfNHg8+mcj0mWrL+app57KF77whSWeR0vUpMt/1dXVLfSPQAUAAAAAgObqoIMOyty5c3PHHXfUb3v77bfz+9//PoMHDy5LjZU7dsyYMWMyceLEXHjhhbniiivys5/9rP75G264IT/60Y9y3nnn5amnnso666yTS6+9eYnrfjBtepJPVqfQNE0KVQAAAAAAYEXUvn37DBw4MKNHj67fdsMNN2TttddO3759y1Ljh0OOyk477ZSePXumf//+Oemkk/LrX/+6/vmLL744Rx55ZAYPHpyNNtoop59+erbcZIMlqlkqlTJ0xAX5/Oc/ny222GJJp9BiNenyX0ny0Ucf5cEHH8zkyZMze/bsBs+dcMIJS9wYAAAAAABUwtFHH53tt98+r7/+erp3757Ro0fn8MMPT1VVVVnGv/n3f8yoa4/Pv/71r3z44YeZM2dOOnXqVP/8Cy+8kOOOO67Ba3bYevPc98iTTa75nVPPzXPPv5g/P+am9EuiSaHKM888k3322SczZszIRx99lM6dO+ftt99Ohw4d0qVLF6EKAAAAAADN1jbbbJPPfvazufbaa7PXXntlwoQJ+d3vfleWsR9/6rl8/bhhGTFiRPbaa6+sssoq+dWvfpWf/rThzeb/O8AplUpNrnn8D8/LHfc+lIduvTJrr712k8ehiZf/OvHEE9O/f/+8++67ad++fR5//PG88sor2W677fKTn/yk3D0CAAAAAMAyddRRR2X06NG5+uqr86UvfSk9evQoy7iPPPnXrLt2t5x66qnp3bt3Ntxww7zyyisN9tl4443zxBMNV5SMf25io2uVSqV859Rzc+td9+W+X/8yvdbpvkS908RQ5dlnn81JJ52U1q1bp3Xr1pk1a1Z69OiR888/P6ecckq5ewQAAAAAgGXqkEMOyeuvv54rrrgiRxxxRNnG3aBXj0x+/c386le/yr///e9cdNFFue222xrsc/zxx+eqq67KNddckxdffDFnn312nnv+Xw1Wr8yeXZtn//ZCnv3bC5ldW5vX35yaZ//2Qv41aXL9Pt8+5dxcf+udGfvzkVl5pQ55c+rbefPNN/Pxxx+XbT4tTZMu/1VdXV3/xVtzzTUzefLkbLrppllllVUyefLkxbwaAAAAAIAW7ZgHFrx9rW0aN84bzyxxKwvTqVOnHHDAAfnDH/6Qr3zlK2Ub98t79c2JRw/Md77zncyaNSv77rtvTjvttAwfPrx+n0MOOSQvvfRSvvvd72bmzJkZMGBADh/QP08887f6fd74z1vZZq9v1D/+yWXX5SeXXZdd+2yXB26+Ikly6bW/SZL0PfDoBj3Mu0cMjdekUGWbbbbJ+PHjs9FGG2W33XbL6aefnrfffjvXXXddttxyy3L3CAAAAAAAy9yUKVNyyCGHpG3btks0Tun1pxs8Pv+HQ3L+Jdc02DZkyJAGj0877bScdtpp9Y/3+MLnskHP/38Jsp491ppv3MXVTdL44IoGmnT5r5EjR6Zbt25JkrPOOiurrbZavvWtb2Xq1Kn55S9/WdYGAQAAAABgWXr33Xfzq1/9Kvfdd1++/e1vN+q1E/7xr6y04c65ZMyvm1x/xowZueCCC/L3v/89//jHP3LGGWfkjw//JYcd1L/JYy5Mv379svnmm5d93BVVk1aq9O7du/7va6yxRu68886yNQQAAAAAAJW07bbb5r333st5552XjTfeuH775ptv/slN5Ut1873ml+edmhOO+Eb+52v7JEnWWO0zTa5fVVWVO++8M2effXZmzZqVjTfeOLdc8eN86Qs7NnnMhbnyyivr77GyzjrrlH38FU2TQpXdd989t956a1ZdddUG26dNm5avfOUrue+++8rRGwAAAAAALHMvv/zyArffeeedqa2tTf7z9/meW3ON1bLySh3T+TOrLHH99u3b549//GPDjUvp/jHdu3dfKuOuqJoUqjzwwAOZPXv2fNtnzpyZhx9+eImbAgAAA
ACA5c266677yV86TK9sI1RMo0KV5557rv7vEydOzJtvvln/eO7cubn77rulWgAAAAAAwAqpUaHK1ltvnaqqqlRVVWX33Xef7/n27dvn4osvLltzAAAAAAAAy4tGhSqTJk1KqVTKeuutlyeeeCJrrLFG/XM1NTXp0qVLWrduXfYmAQAAAAAAKq1Rocq6666b2traDBo0KJ07d/7/148DAAAAAABYwbVq7Auqq6vz29/+dmn0AgAAAAAAsNxq1EqVeb7yla/k9ttvz9ChQ8vdDwAAAAAAK7gtxw1apvUm7HHtMq1XTsOHD8/tt9+eZ+8cXelWSBNDlQ022CBnnXVWHn300Wy33Xbp2LFjg+dPOOGEsjQHAAAAAAAtRVVVVW677bZ85StfqXQrLESTQpUrr7wyq666ap566qk89dRTDZ6rqqoSqgAAAAAAACucRt9TJUkmTZq00D8vvfRSuXsEAAAAAIBlpm/fvjn++OMzZMiQfOYzn8maa66Zyy+/PB999FEGDx6clTf6fNbfaf/cdd8j9a958LGnssO+h6Ztrx3TbZs984ORF2XOnDn/f8wDj84Jp52f7509Kp0375uuXbtm+PDh9c/37NkzSfLVr341VVVV9Y/nue7m36fnjvtmlU2+kK9/6weZ/uFHS/MQsBBNWqnyaaVSKcknK1QAKq3nzLFlGeflsowCAAAAQHN1zTXX5Hvf+16eeOKJ3HTTTfnWt76V22+/PV/96ldzypFfyc+uuCGHnnBaJj/5h7z3/vTsc+jxOXxA/1x74Zn5x79eztEnn5V2bWsy/KRj//+Yv/l9hh5zSP7yu2vz2Esf5PDDD8/OO++cPfbYI08++WS6dOmS0aNHZ++9907r1q3rX/fvf/87t9/zQH5/zYV574NpGXDsD3Luz0fnRz/4TiUOTYvWpJUqSXLttddmyy23TPv27dO+fftstdVWue6668rZGwAAAAAAVMRnP/vZ/PCHP8yGG26YYcOGpX379ll99dVz9NFHZ8P11snpJx6dd957P89NfDGXXPPr9Fira37+ox9kkw165St775YRJx2bn/7y+tTV1dWPudWmG+SMod/Mhuutk0GDBqV3797505/+lCRZY401kiSrrrpqunbtWv84Serq6jLmZyOyxSYbZJcdt82hB+yTP/35iWV7QEjSxJUqF1xwQU477bR85zvfyc4775xSqZRHHnkkxx57bN5+++2ceOKJ5e4TAAAAAACWma222qr+761bt85qq62WLbfcsn7bmmusliSZ+s57ef5fk9Jnuy0bXNFp5+23zocfzchrU/6Tdbp3+2TMTTdsUKNbt26ZOnXqYnvp2bNnVl6p4/9/XZfVM/Wd95o2MZZIk0KViy++OJdeemkGDRpUv+3LX/5yNt988wwfPlyoAgAAAABAs1ZdXd3gcVVVVYNt8wKUurq6lErz3yKj/tYZ+f/bq9s0/Ei+qqqqwUqWxvRS5HWUX5Mu/zVlypTstNNO823faaedMmXKlCVuCgAAAAAAmovNNuyVR8c/Vx+kJMmj4/+alVfqmO7duhQep7q6OnPnzl0aLVImTQpVNthgg/z617+eb/tNN92UDTfccAGvAAAAAACAFdNxhw3Iq2+8meN/eF7+8a9J+e09D+SMn16Wocccklatin8M37Nnz/zpT3/Km2++mffec3mv5VGTLv81YsSIHHzwwXnooYey8847p6qqKn/+85/zpz/9aYFhCwAAAAAAzDNhj2sX/MRa2zRuoDeeWfJmyqB7ty6587qLc/LZo/LZPb6ezquukiO/8ZX88H+PatQ4P/3pTzN06NBcccUV6d69e15++eWl0zBN1qRQ5YADDshf/vKX/OxnP8vtt9+eUqmUzTbbLE888US22aaRb3oAAAAAAFiOPPDAA/NtW1DAUXr96fq/79pnuzzxh+sWPubNV8y37fbbb2/wuH///unfv3+DbcOHD8/w4cMbBEhDjj4kQ44+ZKG1WHqaFKokyXbbb
Zfrr7++nL0AAAAAAAAst5ocqsydOze33XZbnn/++VRVVWXTTTfNl7/85bRp0+QhAQAAAAAAlltNSkD+9re/5ctf/nLefPPNbLzxxkmSf/7zn1ljjTVyxx13ZMsttyxrkwAAAAAAAJXWqikvOuqoo7L55pvntddey9NPP52nn346r776arbaaqscc8wx5e4RAAAAAACg4pq0UuWvf/1rxo8fn8985jP12z7zmc/kRz/6UbbffvuyNQcAAAAAALC8aNJKlY033jj/+c9/5ts+derUbLDBBkvcFAAAAAAAwPKmSaHKyJEjc8IJJ+Tmm2/Oa6+9ltdeey0333xzhgwZkvPOOy/Tpk2r/wMAAAAAALAiaNLlv/bbb78kyYABA1JVVZUkKZVKSZL+/fvXP66qqsrcuXPL0ScAAAAAAEBFNSlUuf/++8vdBwAAAAAAwHKtSaHKrrvuWu4+AAAAAABoIZ7ffeAyrbfpfWPLOt4Dj47Pbgcdk/cmPphVV1m5rGNXwgMPPJDddtst7733XlZdddVKt7Nca1KokiQzZ87Mc889l6lTp6aurq7Bc/vvv/8SNwYAAAAAACxfxowZkyFDhuT999+vdCv1lmVPTQpV7r777gwaNChvv/32fM+5jwoAAAAAAJTX7Nm1qamprnQbZVMqlTJ37ty0adPktR8V0aopL/rOd76Tgw46KFOmTEldXV2DPwIVAAAAAACau1mzZuWEE05Ily5d0q5du3z+85/Pk08+2WCfR558Np/90sFpt97nsuN+gzLh+Rfrn3vltTfS/7D/zWc22zUdN9gpm+92YO7805/rn584cWL22WefrLTSSllzzTVz6KGHNljI0Ldv33znO9/J0KFDs/rqq2ePb3wr3zhuWL7+rR806KG2tjarb7F7Rt/02ySfhBXnXzIm6/Xpn/br98lnv3Rwbv79Hxu85s4778xGG22U9u3bZ7fddsvLL79c6Jg88MADGTx4cD744INUVVWlqqoqI0aMSJJcf/316d27d1ZeeeV07do1AwcOzNSpUxu8tqqqKvfcc0969+6dtm3b5uGHH8706dNzyCGHpGPHjunWrVt+9rOfpW/fvhkyZEj9a2fPnp3vfe976d69ezp27Jgdd9wxDzzwwEJ7Gj58eKH5NEWTQpWpU6dm6NChWXPNNcvdDwAAAAAAVNz3vve93HLLLbnmmmvy9NNPZ4MNNshee+2Vd999t36fk88elZ+cfmKe/MN16bLaZ7L/4BNTW1ubJPn2Kedm1uzaPHTLlZnwp1/nvFNOyEod2ydJpvznrey6667ZeuutM378+Nx99935z3/+kwEDBjTo4ZprrkmbNm3yyCOP5JfnnZpDvtovd9z7UD78aEb9Pvc88Fg++vjjHLDPF5MkPzzvFxl90x259Jxh+ft9v8mJRx+S/znhh3nwsaeSJK+++mq+9rWvZZ999smzzz6bo446Kj/4QcOgZmF22mmnjBo1Kp06dcqUKVMyZcqUnHTSSUk+CT7OOuus/PWvf83tt9+eSZMm5fDDD1/gcT3nnHPy/PPPZ6uttsrQoUPzyCOP5I477si4cePy8MMP5+mnn27wmsGDB+eRRx7Jr371qzz33HM56KCDsvfee+fFF19cYE/f/e53C82nKZq0rubAAw/MAw88kPXXX7/c/QAAAAAAQEV99NFHufTSSzNmzJj069cvSXLFFVdk3Lhxueqqq7J9r1WTJGeceEz2+MLnkiTXjDoza/ful9vuuj8D9t8zk994Mwfs88VsuemGSZL11l27fvxLr7052267bUaOHFm/7eqrr06PHj3yz3/+MxtttFGSZIMNNsj555//yQ4rz8j6666djh3a5ba77suhB+6XJBl7+93p/6UvpNPKK+WjGR/ngituyH03XZY+vT9bX/fPTz6bX15/S3bts10uvfTSrLfeevnZz36WqqqqbLzxxpkwYULOO++8xR6XmpqarLLKKqmqqkrXrl2TJHV1dZk2bVqOOOKIt
Gr1yTqO9dZbLxdddFF22GGHfPjhh1lppZXqxzjzzDOzxx57JEmmT5+ea665JmPHjs0Xv/hJKDR69OistdZa9fv/+9//zo033pjXXnutfvt3v/vd3H333Rk9enRGjhw5X09LU5NClZ///Oc56KCD8vDDD2fLLbdMdXXD67idcMIJZWkOAAAAAACWtX//+9+pra3NzjvvXL+turo6O+ywQ55//vls36tPkqRP763qn+/8mVWy8frr5vl/TUqSnHDEN/KtYefk3gcfz5d22SEH7PPFbLXZJ2HJU889n/sf/kuDsOHTteeFKr17927wXHV1dQ7ab4/ccNtdOfTA/fLRjI/z23seyNhffBLOTPznS5k5c1b2+MZxDV43u7Y222yxSZLk+eefz+c+97lUVVXVP9+nT5+mHahPeeaZZ3LmmWfm2Wefzbvvvpu6urokyeTJk7PZZpvV7/fpOb300kupra3NDjvsUL9tlVVWycYbb1z/+Omnn06pVKo/JvPMmjUrq6222hL33VhNClXGjh2be+65J+3bt6+/Dto8VVVVQhUAAAAAAJqtUqmUJA0++563/b+3/bd5zx818KvZa9c++cOf/px7H3os5/x8dH56+tAcf8TXU1eqS//+/Re4OqRbt271f+/YseN8zx/ytX7Z9YCjM/XtdzPuocfTrm3b9Nvtk/BnXpDxh2svSveuazR4XduamgZzK6ePPvooe++9d/bcc89cf/31WWONNTJ58uTstddemT17doN9Pz2nRR3neerq6tK6des89dRTad26dYP9FhRKLW1NClV++MMf5swzz8wPfvCD+uU8AAAAAACwIthggw1SU1OTP//5zxk4cGCST24IP378+AY3UH/8qQlZp/snIch770/LP196JZus37P++R7du+bYQQfm2EEHZtg5F+eKsbfm+CO+nm232CS33PtIevbsmTZtGvcx/U69P5sea62Zm+64N3fd/0gO2u9Lqan55GpSm220Xtq2rcnk16dk1z7bLfD1m222WW6//fYG2x5//PHC9WtqajJ37twG21588cW8/fbbOffcc9OjR48kyfjx4xc71vrrr5/q6uo88cQT9a+bNm1aXnzxxey6665Jkm222SZz587N1KlTs8suuxTuaWlpUiIye/bsHHzwwQIVAAAAAABWOB07dsy3vvWtnHzyybn77rszceLEHH300ZkxY0aOPPLI+v3OHHVF/vTwX/K3f/wrh594RlbvvGq+svduSZIhp/849zzwaCZNfj1PT3g+9z3yZDbdoFeS5NuHH5x333033/jGN/LEE0/kpZdeyr333psjjjhiseFAVVVVBn5l71x23c0Z99Bf8j8H7FP/3Mordcx3v3loThx+Qa759e/y75dfzTN/+0d+MeamXPPr3yVJjj322Pz73//O0KFD88ILL2Ts2LEZM2ZM4WPTs2fPfPjhh/nTn/6Ut99+OzNmzMjaa6+dmpqaXHzxxXnppZdyxx135KyzzlrsWCuvvHIOO+ywnHzyybn//vvz97//vf7eLPNWr2y00UY55JBDMmjQoNx6662ZNGlSnnzyyZx33nm58847F9rT0tKklSqHHXZYbrrpppxyyinl7gcAAAAAgBXcpveNXfATa23TuIHeeGbJm1mIc889N3V1dTn00EMzffr09O7dO/fcc08+85nP/P99hh2f/z3jJ3lx0uR8drONcsfoUfWrRubW1eXbp56b16ZMTaeVOmbvvjvlZ8NPSpKs1XWNPPLII/n+97+fvfbaK7Nmzcq6666bvffeu9BihkO+tk9GXnx11l27W3befusGz531vePSZfXOOefno/PS5NeyaqeVs+2Wm+SU449Ikqyzzjq55ZZbcuKJJ+aSSy7JDjvskJEjR+aII44odFx22mmnHHvssTn44IPzzjvv5PTTT8+JJ56Yq6++Oj/84Q9z0UUXZdttt81PfvKT7L///osd74ILLsixxx6b/fbbL506dcr3vve9vPrqq2nXr
l39PqNHj87ZZ5+dk046Ka+//npWW2219OnTJ/vss88CezrjjDMyfPjwQvNprCaFKnPnzs3555+fe+65J1tttdV8N6q/4IILytIcAAAAAABUQrt27XLRRRfloosumu+5vjv1Tun1p5Mk++3xhQW+/uKzv7/I8TfccMPceuutC33+gQceWOhzm220Xn39/1ZVVZUTjvxGTjjyGwt9/X777Zf99tuvwbbBgwcvst9Pu/TSS3PppZcm+eSeJ9OmTcs3vvGNHHLIIQ32+/S9Ufr27bvA+7msvPLKueGGG+off/TRRxkxYkSOOeaY+m3V1dUZMWJERowYUainpalJocqECROyzTafJIZ/+9vfytoQAAAAAADQMjzzzDP5xz/+kR122CEffPBBzjzzzCTJl7/85Qp3tmBNClXuv//+cvcBAAAAAABUWL9+/fLwww8v8LlTTjllqdwW5Cc/+UleeOGF1NTUZLvttsvDDz+c1Vdfvex1yqFRocrXvva1xe5TVVWVW265pckNAQAAAAAAlXHllVfm448/XuBznTt3Lnu9bbbZJk899VTZx11aGhWqrLLKKkurDwAAAAAAoMK6d+9e6RaWa40KVUaPHr20+gAAAAAAYEXzfzcmX8D9yWGZKpXpTdiqLKMAAAAAAMB/aV37YVI3J7PrKt0JLd3s2bOTJK1bt16icZp0o3oAAAAAAFicNrM/SIe3ns1bHT+T6s+0S6uqxbxg5szGFZhTpiUwlajbXOa6GHV1dZk9e3ZmzpyZVq2Wz3UcdXV1eeutt9KhQ4e0abNksYhQBQAAAACApaIqpXT7x9WZ1KlXXvm4c5LFpCofTWpcgfffanJvFa/bXOa6GKVSKR9//HHat2+fqqrFpWaV06pVq6yzzjpL3KNQBQAAAACApaZm5tvZ8OHjM7t9l6TVYi699J3xjRv85wc1vbFK120uc12M2traPPTQQ/nCF76Q6urqso5dTjU1NWVZSSNUAQAAAABgqWpVmpN2M95Y/I7t2jVu4A9fbVpDy0Pd5jLXxWjdunXmzJmTdu3aLdehSrksnxc4AwAAAAAAWM4IVQAAAAAAAAoQqgAAAAAAABQgVAEAAAAAAChAqAIAAAAAAFCAUAUAAAAAAKAAoQoAAAAAAEABQhUAAAAAAIAChCoAAAAAAAAFCFUAAAAAAAAKEKoAAAAAAAAUIFQBAAAAAAAoQKgCAAAAAABQgFAFAAAAAACgAKEKAAAAAABAAUIVAAAAAACAAoQqAAAAAAAABQhVAAAAAAAAChCqAAAAAAAAFCBUAQAAAAAAKECoAgAAAAAAUIBQBQAAAAAAoAChCgAAAAAAQAFCFQAAAAAAgAKEKgAAAAAAAAUIVQAAAAAAAAoQqgAAAAAAABQgVAEAAAAAAChAqAIAAAAAAFCAUAUAAAAAAKAAoQoAAAAAAEABQhUAAAAAAIAChCoAAAAAAAAFCFUAAAAAAAAKEKoAAAAAAAAUIFQBAAAAAAAoQKgCAAAAAABQgFAFAAAAAACgAKEKAAAAAABAAUIVAAAAAACAAoQqAAAAAAAABVQ8VLnkkkvSq1evtGvXLtttt10efvjhQq975JFH0qZNm2y99dZLt0EAAAAAAIAkbSpZ/KabbsqQIUNyySWXZOedd84vf/nL9OvXLxMnTsw666yz0Nd98MEHGTRoUL74xS/mP//5zzLsGAAAGuo5c2xZxnm5LKMAAACwNFU0VLngggty5JFH5qijjkqSjBo1Kvfcc08uvfTSnHPOOQt93Te/+c0MHDgwrVu3zu23377IGrNmzcqsWbPqH0+bNi1JUltbm9ra2iWfxP+N9en/Nmdt07Ys4zTmWFSiZnOv25Lm2ti6bVuXlnnNitZdzDGuSU2D/y6LmkU1h7otaa7lqtuS5lqpuo3+faNVuyWu+X+Fy1q39v+er13Ufs1krsvrz4DltWal6rakuTal7ty2y/4Yl6Pm0qhb93/P1y1ivxXlXLxc121Jc21k3UqdJ
yr2c7YF/Qwoy+9PnxQuW83Cmsn7yflpKddtSXMtV93mMtfFDtf8Px9vTO9VpVKpPP8KbKTZs2enQ4cO+c1vfpOvfvWr9dv/93//N88++2wefPDBBb5u9OjRueSSS/LYY4/l7LPPzu23355nn312oXWGDx+eESNGzLd97Nix6dChwxLPAwAAAAAAaL5mzJiRgQMH5oMPPkinTp0WuW/FVqq8/fbbmTt3btZcc80G29dcc828+eabC3zNiy++mB/84Ad5+OGH06ZNsdaHDRuWoUOH1j+eNm1aevTokT333HOxB6eo2trajBs3LnvssUeqq6vLMmal9BnbpyzjPDbwseW6ZnOv25Lm2ti6Wwy/pyw1/zZ8r0btX6m6izvGNanJ91f9fs57/7zMzuwF7tMcvq6VqtuS5lquui1prpWq29iaOWftJa6ZJBn2Wlnr1rZql3FbXpQ9JpyQ6rqZy6RmYY2su7z+DCjC987Srdmc6r7Qe/slrrnx+CeXec2lUbeubdtMOvWU9PrRyLT61FUPlqTm8nouXq7rtqS5NrJupc4TlTrGLelnQFl+f0oad4xb0PdO2eq2pLk2tm5Lmmu56jaXuS7GivD5+LwrXBVR0ct/JUlVVVWDx6VSab5tSTJ37twMHDgwI0aMyEYbbVR4/LZt26btApZtV1dXl/0LvDTGXNZmZcH/aGisxhyHStRs7nVb0lwbW3fW3PnPH0u7ZkXrFjzGszN7ofs2h69rpeq2pLmWq25Lmmul6jb6d41F/YO7cYWXSt3qupkL/1Cgmcx1ef8ZsLzVrFTdljTXptRtvZDwYHmvuTTrtpo1a6H7rmjn4uWybkuaayPrVuo8UbGfsy3oZ0BZfn/6pHDZay5WM3k/OT8t5botaa7lqttc5lp42Ob7+Xhj+q5YqLL66qundevW861KmTp16nyrV5Jk+vTpGT9+fJ555pl85zvfSZLU1dWlVCqlTZs2uffee7P77rsvk94BAAAAAICWp1WlCtfU1GS77bbLuHHjGmwfN25cdtppp/n279SpUyZMmJBnn322/s+xxx6bjTfeOM8++2x23HHHZdU6AAAAAADQAlX08l9Dhw7NoYcemt69e6dPnz65/PLLM3ny5Bx77LFJPrkfyuuvv55rr702rVq1yhZbbNHg9V26dEm7du3m2w4AAAAAAFBuFQ1VDj744Lzzzjs588wzM2XKlGyxxRa58847s+666yZJpkyZksmTJ1eyRQAAAAAAgCTLwY3qjzvuuBx33HELfG7MmDGLfO3w4cMzfPjw8jcFAAAAAADwXyp2TxUAAAAAAIDmRKgCAAAAAABQgFAFAAAAAACggIrfUwUAgKWn58yxZRnn5bKMAgAAAM2blSoAAAAAAAAFCFUAAAAAAAAKEKoAAAAAAAAUIFQBAAAAAAAoQKgCAAAAAABQgFAFAAAAAACgAKEKAAAAAABAAUIVAAAAAACAAoQqAAAAAAAABQhVAAAAAAAAChCqAAAAAAAAFNCm0g0AAEA59Jw5tizjvFyWUQAAAFgRWakCAAAAAABQgFAFAAAAAACgAKEKAAAAAABAAUIVAAAAAACAAoQqAAAAAAAABbSpdAMAAKx4es4cu8jn27Yu5fzMzRazrsqsuVUL3OflpdAXAAAALAkrVQAAAAAAAAoQqgAAAAAAABQgVAEAAAAAAChAqAIAAAAAAFCAUAUAAAAAAKAAoQoAAAAAAEABQhUAAAAAAIAChCoAAAAAAAAFCFUAAAAAAAAKEKoAAAAAAAAUIFQBAAAAAAAoQKgCAAAAAABQgFAFAAAAAACgAKEKAAAAAABAAUIVAAAAAACAAoQqAAAAAAAABQhVAAAAAAAAChCqAAAAAAAAFCBUAQAAAAAAKECoAgAAAAAAUIBQBQAAAAAAoAChCgAAAAAAQAFCFQAAAAAAgAKEKgAAAAAAAAUIVQAAAAAAAAoQqgAAAAAAABTQptINAAAAACuOnjPHLvEYLy95GwAAS4WVK
gAAAAAAAAUIVQAAAAAAAAoQqgAAAAAAABQgVAEAAAAAACjAjeoBAABgBVSOG8YnbhoP0Bz5GQBLj1AFAAAAAGApEG7AisflvwAAAAAAAAoQqgAAAAAAABQgVAEAAAAAAChAqAIAAAAAAFCAUAUAAAAAAKAAoQoAAAAAAEABQhUAAAAAAIAChCoAAAAAAAAFCFUAAAAAAAAKEKoAAAAAAAAUIFQBAAAAAAAoQKgCAAAAAABQgFAFAAAAAACggDaVbgAAAAAAgOat58yxZRnn5bKMAkuPlSoAAAAAAAAFCFUAAAAAAAAKcPkvAAAAAGCFV47LU7285G0AzZyVKgAAAAAAAAVYqQIAAABLkRv3AgCsOKxUAQAAAAAAKECoAgAAAAAAUIBQBQAAAAAAoAD3VAEAAACaNfetAQCWFStVAAAAAAAAChCqAAAAAAAAFCBUAQAAAAAAKECoAgAAAAAAUIBQBQAAAAAAoAChCgAAAAAAQAFCFQAAAAAAgAKEKgAAAAAAAAUIVQAAAAAAAAoQqgAAAAAAABQgVAEAAAAAAChAqAIAAAAAAFCAUAUAAAAAAKAAoQoAAAAAAEABQhUAAAAAAIAChCoAAAAAAAAFCFUAAAAAAAAKEKoAAAAAAAAUIFQBAAAAAAAoQKgCAAAAAABQgFAFAAAAAACgAKEKAAAAAABAAUIVAAAAAACAAoQqAAAAAAAABQhVAAAAAAAACmhT6QYAAAAAAKApes4cW5ZxXi7LKLQEVqoAAAAAAAAUIFQBAAAAAAAoQKgCAAAAAABQgHuqAAAAsMwNGFaef45OKMsoAABQjJUqAAAAAAAABQhVAAAAAAAAChCqAAAAAAAAFCBUAQAAAAAAKECoAgAAAAAAUECbSjcAAAAA0Bz1nDm2LOO8XJZRAIBlwUoVAAAAAACAAqxUAQAAoMUYMGzR/wxumzY5LcnhQ9tkVuYucJ8JS6EvAACaBytVAAAAAAAAChCqAAAAAAAAFCBUAQAAAAAAKECoAgAAAAAAUIBQBQAAAAAAoAChCgAAAAAAQAFCFQAAAAAAgAKEKgAAAAAAAAW0qXQDAAAAizJh0uRKtwAAlFHPmWPLMs7LZRkFoHGsVAEAAAAAAChAqAIAAAAAAFCAy38BAAAAAMByzqXzlg9WqgAAAAAAABRgpQoAAEALN2DYkv/TcEIZ+gAAgOWdUAUAAACARXLJGQD4hMt/AQAAAAAAFCBUAQAAAAAAKECoAgAAAAAAUIBQBQAAAAAAoICKhyqXXHJJevXqlXbt2mW77bbLww8/vNB9b7311uyxxx5ZY4010qlTp/Tp0yf33HPPMuwWAAAAAABoqSoaqtx0000ZMmRITj311DzzzDPZZZdd0q9fv0yePHmB+z/00EPZY489cuedd+app57Kbrvtlv79++eZZ55Zxp0DAAAAAAAtTUVDlQsuuCBHHnlkjjrqqGy66aYZNWpUevTokUsvvXSB+48aNSrf+973sv3222fDDTfMyJEjs+GGG+Z3v/vdMu4cAAAAAABoadpUqvDs2bPz1FNP5Qc/+EGD7XvuuWceffTRQmPU1dVl+vTp6dy580L3mTVrVmbNmlX/eNq0aUmS2tra1NbWNqHz+c0bp1zjVVLbtC3LOI05FpWo2dzrtqS5NrZu29alZV6zonUXc4xrUtPgv8uiZlHNoW5Lmmu56rakuVaqbrM5Py2mbttWpQb/XRY1i2o2dVvQ92xatStH0Ubt3pzPE5Wq21zm+vQrUxc9Xqt2Gbdq8tCrb6W6bmZZara481MZ6rakuTa2bsXOE37eLfW6i/t5V/t/z9cu7udiY+qW42dsY2umeX/PNpvvnUqcn1rQXJNU5nfUSs214HjN+fPxxvReVSqVyvOVaKQ33ngj3bt3zyOPPJKddtqpfvvIkSNzzTXX5IUXXljsGD/+8Y9z7rnn5vnnn0+XLl0WuM/w4cMzYsSI+baPHTs2HTp0aPoEAAAAAACAZm/GjBkZOHBgP
vjgg3Tq1GmR+1Zspco8VVVVDR6XSqX5ti3IjTfemOHDh+e3v/3tQgOVJBk2bFiGDh1a/3jatGnp0aNH9txzz8UenKJqa2szbty47LHHHqmuri7LmJXSZ2yfsozz2MDHluuazb1uS5prY+tuMfyestT82/C9GrV/peou7hjXpCbfX/X7Oe/98zI7sxe4T3P4ulaqbkuaa7nqtqS5VqpuY2sur+fFtq1KOat3XU4b3yqz6hb8u19zORcvrz8Dimgu3zs5Z+0lLzrstUbt3pzPE5Wq21zmurj3U22rdhm35UXZY8IJC12p0tj3U0s7P5Wjbkuaa2PrVup7x8+7pV+3LOenpHHnqHL8jG1szTTv79nmcJ6oVN2WNNckFfkdtWJzXYwV4fPxeVe4KqJiocrqq6+e1q1b580332ywferUqVlzzTUX+dqbbropRx55ZH7zm9/kS1/60iL3bdu2bdq2nX/ZZnV1ddm/wEtjzGVtVmYtfqcCGnMcKlGzuddtSXNtbN1Zcxcfypa7ZkXrFjzGszN7ofs2h69rpeq2pLmWq25Lmmul6jab81PBurPqqha674o217LXbUHfs1nUB0fFizZq9+Z8nqhU3eYy16Lvp+q6mQv/0LK5nCeacd2WNNfG1q3YecLPu6Vetyznp08Kl73mYjWX95Pz01Kt25LmmqQyv6NWaq6NGLe5fj7emL4rdqP6mpqabLfddhk3blyD7ePGjWtwObD/duONN+bwww/P2LFjs++++y7tNgEAAAAAAJJU+PJfQ4cOzaGHHprevXunT58+ufzyyzN58uQce+yxST65dNfrr7+ea6+9NskngcqgQYNy4YUX5nOf+1z9Kpf27dtnlVVWqdg8AAAAAACAFV9FQ5WDDz4477zzTs4888xMmTIlW2yxRe68886su+66SZIpU6Zk8uTJ9fv/8pe/zJw5c/Ltb3873/72t+u3H3bYYRkzZsyybh8AAAAAAGhBKn6j+uOOOy7HHXfcAp/776DkgQceWPoNAQAAAAAALEDF7qkCAAAAAADQnAhVAAAAAAAAChCqAAAAAAAAFCBUAQAAAAAAKECoAgAAAAAAUIBQBQAAAAAAoAChCgAAAAAAQAFCFQAAAAAAgAKEKgAAAAAAAAUIVQAAAAAAAAoQqgAAAAAAABQgVAEAAAAAAChAqAIAAAAAAFCAUAUAAAAAAKAAoQoAAPD/2rvXGKvKu23g1zDAAFIODpZD5dRaDZE21CGxSI3lC2prKz0YmhqVICTEGAWiVmjCwYhNH4gx0AK2grWNhvlgk5oUE0hqFQW0JbRprTVBoUMZKCm0aqQMFOb58LzO2wGKCxlm7Zn9+yUk7LXuvdb/TuA/e+aa+14AAAAUIFQBAAAAAAAoQKgCAAAAAABQgFAFAAAAAACgAKEKAAAAAABAAUIVAAAAAACAAnqWXQAAAHDu/rC7qewSAAAAqo6VKgAAAAAAAAUIVQAAAAAAAAoQqgAAAAAAABTgmSoAAAAAULIxR5856/m62tb8T05kfMu6tJyo+a/j9nRwXQC0Z6UKAAAAAABAAUIVAAAAAACAAoQqAAAAAAAABQhVAAAAAAAAChCqAAAAAAAAFCBUAQAAAAAAKECoAgAAAAAAUEDPsgsA4KP7w+6ms54/3qNPNg5Ktv1lb3qdPNo5RQEAAABAN2WlCgAAAAAAQAFCFQAAAAAAgAKEKgAAAAAAAAUIVQAAAAAAAArwoHoAAIAq94fdTWWXAAAAXYJQBbhgfHMOAAAAAHQntv8CAAAAAAAoQKgCAAAAAABQgFAFAAAAAACgAKEKAAAAAABAAUIVAAAAAACAAoQqAAAAAAAABQhVAAAAAAAAChCqAAAAAAAAFNCz7AIAAACoPn/Y3VR2CQAAcM6sVAEAAAAAAChAqAIAAAAAAFCAUAUAAAAAAKAAz1QBAAAK8QwMAACg2lmpAgAAAAAAUIBQBQAAAAAAoADbfwEAAABQkWw9CUClsVIFAAAAAACgA
KEKAAAAAABAAUIVAAAAAACAAoQqAAAAAAAABXhQPQBdggdUAgAAAFA2oQoA/BeCHAAAKo3PqABQLtt/AQAAAAAAFCBUAQAAAAAAKMD2XwAAAFSNMUefOev5utrW/E9OZHzLurScqDnjmD0XoC4AALoGoQpAF1bGDwXs4QwAAABAtbL9FwAAAAAAQAFCFQAAAAAAgAJs/wVVwHZNAAAAUIzvoQE4GytVAAAAAAAACrBSharlN08AAAAAADgXQhVKV23hRrXNFwAAAACguxCqAAAAFW3M0WfO+xp7zr8MAOh2OuJrbOLrLNXJZ9TqJVShHasoAAAAAADgzIQqwAXjN14AAAAAgO5EqAIA0EmsCAUAAICuTagCAJRGyAAAAAB0JUIVAKgwZQUN1RRwVNNcAQAAgI4jVAEAADiFgBsAADiTHmUXAAAAAAAA0BUIVQAAAAAAAAoQqgAAAAAAABTgmSoAAABVbszRZ877GnvOvwwAAKh4VqoAAAAAAAAUIFQBAAAAAAAoQKgCAAAAAABQgFAFAAAAAACgAKEKAAAAAABAAT3LLgAAADh3Y44+c97X2HP+ZQAAAFQVoQoAAACdriOCwUQ4CABA57L9FwAAAAAAQAFCFQAAAAAAgAKEKgAAAAAAAAUIVQAAAAAAAAoQqgAAAAAAABQgVAEAAAAAAChAqAIAAAAAAFCAUAUAAAAAAKAAoQoAAAAAAEABPcsuAAAq1Zijz3TIdfZ0yFUAAAAAKJuVKgAAAAAAAAUIVQAAAAAAAAqw/RcA58SWWAAAAABUK6EKAEAn6YhQcs/5lwEAAAB8REIVqAJWFgCVSn8CAAAAuhKhClWrrB/k+S1l+Gj88J2O5N8TfDT+7wAAANXOg+oBAAAAAAAKEKoAAAAAAAAUYPsv2iljSwfbSAAAAAAA0BUIVQCgwnjmEwAAAEBlsv0XAAAAAABAAVaqAAAAnMIWtQCVwWpqACqNUAUAAKBCCHMAAKCy2f4LAAAAAACgAKEKAAAAAABAAUIVAAAAAACAAoQqAAAAAAAABXhQPQAAAAD8P2OOPtMh19nTIVcBoNIIVQAAAAC6CD/wB4By2f4LAAAAAACgAKEKAAAAAABAAUIVAAAAAACAAoQqAAAAAAAABQhVAAAAAAAAChCqAAAAAAAAFCBUAQAAAAAAKECoAgAAAAAAUIBQBQAAAAAAoAChCgAAAAAAQAFCFQAAAAAAgAKEKgAAAAAAAAUIVQAAAAAAAAoQqgAAAAAAABQgVAEAAAAAAChAqAIAAAAAAFCAUAUAAAAAAKAAoQoAAAAAAEABpYcqq1evztixY9OnT580NDRky5YtZx3/4osvpqGhIX369MknP/nJrF27tpMqBQAAAAAAqlmpoUpjY2Pmzp2b7373u9m5c2euvfba3HjjjWlqajrj+N27d+dLX/pSrr322uzcuTMLFy7MPffck2effbaTKwcAAAAAAKpNqaHKo48+mjvvvDOzZs3KuHHj8thjj2XkyJFZs2bNGcevXbs2o0aNymOPPZZx48Zl1qxZmTlzZlasWNHJlQMAAAAAANWmZ1k3PnbsWHbs2JEHH3yw3fGpU6dm69atZ3zPtm3bMnXq1HbHrr/++qxbty7Hjx9Pr169TntPS0tLWlpa2l6/8847SZLDhw/n+PHj5zuNJMnx48dz5MiRHDp06Iw1dCU9//1+h1zn0KFDFX3Prn7faprrud63muZa5L49T7bmyJGT6Xm8R06crOmUexbVFe5bTXPtqPtW01zLum93mav+VBn3NdcLe89qu293mav+VBn3raa5nut9q2muHXXf7jLXIv3pXO9bqXOt5PtW01zP9b7VNNeOum9XmeuH6Q4/H3/vvfeSJK2trR86tqa1yKgLoLm5OZ/4xCfyyiuv5Jprrmk7/sgjj+Spp57Km2++edp7Lr/88syYMSMLFy5sO7Z169ZMnjw5zc3NGT58+GnvWbJkSZYuXXphJgEAAAAAAHQLe/fuzaWXX
nrWMaWtVPlATU37ZL21tfW0Yx82/kzHP7BgwYLMnz+/7fXJkydz+PDh1NfXn/U+5+Ldd9/NyJEjs3fv3gwYMKBDrgnQEfQnoFLpT0Cl0p+ASqU/AZWqO/Sn1tbWvPfeexkxYsSHji0tVBkyZEhqa2tz4MCBdscPHjyYoUOHnvE9w4YNO+P4nj17pr6+/ozvqaurS11dXbtjgwYN+uiFn8WAAQO67D8aoHvTn4BKpT8BlUp/AiqV/gRUqq7enwYOHFhoXGkPqu/du3caGhqyefPmdsc3b97cbjuw/zRp0qTTxm/atCkTJ07ssnu1AQAAAAAAXUNpoUqSzJ8/P0888UTWr1+fN954I/PmzUtTU1PmzJmT5P+27rr99tvbxs+ZMyd/+ctfMn/+/LzxxhtZv3591q1bl/vuu6+sKQAAAAAAAFWi1GeqTJ8+PYcOHcpDDz2U/fv3Z/z48dm4cWNGjx6dJNm/f3+ampraxo8dOzYbN27MvHnz8sMf/jAjRozIypUr841vfKOsKST5vy3GFi9efNo2YwBl05+ASqU/AZVKfwIqlf4EVKpq6081rR886R0AAAAAAID/qtTtvwAAAAAAALoKoQoAAAAAAEABQhUAAAAAAIAChCoAAAAAAAAFCFXOw5gxY1JTU9Puz4MPPthuTFNTU77yla/koosuypAhQ3LPPffk2LFjJVUMVJuWlpZMmDAhNTU1+d3vftfunP4ElOGrX/1qRo0alT59+mT48OG57bbb0tzc3G6M/gR0tj179uTOO+/M2LFj07dv33zqU5/K4sWLT+s9+hNQhmXLluWaa65Jv379MmjQoDOO0Z+AsqxevTpjx45Nnz590tDQkC1btpRd0gXXs+wCurqHHnoos2fPbnvdv3//tr+fOHEiX/7yl3PJJZfk5ZdfzqFDh3LHHXektbU1q1atKqNcoMo88MADGTFiRH7/+9+3O64/AWWZMmVKFi5cmOHDh2ffvn2577778s1vfjNbt25Noj8B5fjzn/+ckydP5vHHH89ll12WP/7xj5k9e3bef//9rFixIon+BJTn2LFjueWWWzJp0qSsW7futPP6E1CWxsbGzJ07N6tXr87kyZPz+OOP58Ybb8yf/vSnjBo1quzyLpia1tbW1rKL6KrGjBmTuXPnZu7cuWc8//zzz+emm27K3r17M2LEiCTJhg0bMmPGjBw8eDADBgzoxGqBavP8889n/vz5efbZZ3PllVdm586dmTBhQts5/QmoBM8991ymTZuWlpaW9OrVS38CKsby5cuzZs2avP3220l8fgLK95Of/CRz587NP//5z3bH9SegLFdffXWuuuqqrFmzpu3YuHHjMm3atHzve98rsbILy/Zf5+n73/9+6uvrM2HChCxbtqzd0spt27Zl/PjxbV/QkuT6669PS0tLduzYUUa5QJX429/+ltmzZ+dnP/tZ+vXrd9p5/QmoBIcPH87TTz+da665Jr169UqiPwGV45133snFF1/c9lp/AiqV/gSU4dixY9mxY0emTp3a7vjUqVPbdiLoroQq5+Hee+/Nhg0b8sILL+Tuu+/OY489lrvuuqvt/IEDBzJ06NB27xk8eHB69+6dAwcOdHa5QJVobW3NjBkzMmfOnEycOPGMY/QnoEzf+c53ctFFF6W+vj5NTU35xS9+0XZOfwIqwVtvvZVVq1Zlzpw5bcf0J6BS6U9AGf7+97/nxIkTp/WfoUOHdvveI1Q5xZIlS057+Pypf377298mSebNm5frrrsun/3sZzNr1qysXbs269aty6FDh9quV1NTc9o9Wltbz3gc4GyK9qdVq1bl3XffzYIFC856Pf0J6Cjn8vkpSe6///7s3LkzmzZtSm1tbW6//fb85460+hPQUc61PyVJc3Nzbrjhhtxyyy2ZNWtWu3P6E9BRPkp/Ohv9CSjLqX2mGnqPB9Wf4u677863vvWts44ZM2bMGY9//vOfT5Ls2rUr9fX1GTZsWF599dV2Y/7xj3/k+
PHjpyV4AB+maH96+OGHs3379tTV1bU7N3HixNx666156qmn9CegQ53r56chQ4ZkyJAhufzyyzNu3LiMHDky27dvz6RJk/QnoEOda39qbm7OlClTMmnSpPzoRz9qN05/AjrS+fz86VT6E1CGIUOGpLa29rRVKQcPHuz2vUeocooPvsn/KHbu3JkkGT58eJJk0qRJWbZsWfbv3992bNOmTamrq0tDQ0PHFAxUjaL9aeXKlXn44YfbXjc3N+f6669PY2Njrr766iT6E9Cxzufz0wcrVFpaWpLoT0DHOpf+tG/fvkyZMiUNDQ158skn06NH+40d9CegI53P56dT6U9AGXr37p2GhoZs3rw5X/va19qOb968OTfffHOJlV14Na3/udcChW3bti3bt2/PlClTMnDgwPzmN7/JvHnzMnHixLZ9wU+cOJEJEyZk6NChWb58eQ4fPpwZM2Zk2rRpWbVqVckzAKrFnj17Mnbs2OzcuTMTJkxIoj8B5Xjttdfy2muv5Qtf+EIGDx6ct99+O4sWLcr+/fvz+uuvp66uTn8CStHc3Jzrrrsuo0aNyk9/+tPU1ta2nRs2bFgSn5+A8jQ1NeXw4cN57rnnsnz58mzZsiVJctlll6V///76E1CaxsbG3HbbbVm7dm3bSt8f//jHef311zN69Oiyy7tgrFT5iOrq6tLY2JilS5empaUlo0ePzuzZs/PAAw+0jamtrc0vf/nL3HXXXZk8eXL69u2bb3/721mxYkWJlQPoT0A5+vbtm5///OdZvHhx3n///QwfPjw33HBDNmzY0LZlof4ElGHTpk3ZtWtXdu3alUsvvbTduQ9+D1F/AsqyaNGiPPXUU22vP/e5zyVJXnjhhXzxi1/Un4DSTJ8+PYcOHcpDDz2U/fv3Z/z48dm4cWO3DlQSK1UAAAAAAAAK6fHhQwAAAAAAABCqAAAAAAAAFCBUAQAAAAAAKECoAgAAAAAAUIBQBQAAAAAAoAChCgAAAAAAQAFCFQAAAAAAgAKEKgAAAAAAAAUIVQAAgG5ryZIlmTBhQtllAAAA3URNa2tra9lFAAAAnKuampqznr/jjjvygx/8IC0tLamvr++kqgAAgO5MqAIAAHRJBw4caPt7Y2NjFi1alDfffLPtWN++fTNw4MAySgMAALop238BAABd0rBhw9r+DBw4MDU1NacdO3X7rxkzZmTatGl55JFHMnTo0AwaNChLly7Nv//979x///25+OKLc+mll2b9+vXt7rVv375Mnz49gwcPTn19fW6++ebs2bOncycMAACUTqgCAABUlV/96ldpbm7OSy+9lEcffTRLlizJTTfdlMGDB+fVV1/NnDlzMmfOnOzduzdJcuTIkUyZMiX9+/fPSy+9lJdffjn9+/fPDTfckGPHjpU8GwAAoDMJVQAAgKpy8cUXZ+XKlbniiisyc+bMXHHFFTly5EgWLlyYT3/601mwYEF69+6dV155JUmyYcOG9OjRI0888UQ+85nPZNy4cXnyySfT1NSUX//61+VOBgAA6FQ9yy4AAACgM1155ZXp0eP//37Z0KFDM378+LbXtbW1qa+vz8GDB5MkO3bsyK5du/Kxj32s3XWOHj2at956q3OKBgAAKoJQBQAAqCq9evVq97qmpuaMx06ePJkkOXnyZBoaGvL000+fdq1LLrnkwhUKAABUHKEKAADAWVx11VVpbGzMxz/+8QwYMKDscgAAgBJ5pgoAAMBZ3HrrrRkyZEhuvvnmbNmyJbt3786LL76Ye++9N3/961/LLg8AAOhEQhUAAICz6NevX1566aWMGjUqX//61zNu3LjMnDkz//rXv6xcAQCAKlPT2traWnYRAAAAAAAAlc5KFQAAAAAAgAKEKgAAAAAAAAUIVQAAAAAAAAoQqgAAAAAAABQgVAEAAAAAAChAqAIAAAAAAFCAUAUAAAAAAKAAoQoAAAAAAEABQhUAAAAAAIAChCoAAAAAAAAFCFUAAAAAAAAK+F/6k7hFC
YsuVAAAAABJRU5ErkJggg==", - "text/plain": [ - "
" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], + "outputs": [], "source": [ "#| eval: false\n", "df= feature_importances['Past variable importance over time']\n", @@ -2184,18 +1450,7 @@ "cell_type": "code", "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAABmcAAANVCAYAAACNriqBAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8fJSN1AAAACXBIWXMAAA9hAAAPYQGoP6dpAAEAAElEQVR4nOzdeXhTZfrG8fsk3TeQHQpSQFllxwUUARUQkJ86oo4gu864DyCuiCw6oKgI4yijjrKoCDOKuKGACogCsijqICgiCGIRAaH7lry/PyAhaVNo0rRNm+/nurhMTt6c8+Y0Pba5+zyvZYwxAgAAAAAAAAAAQLmwVfQEAAAAAAAAAAAAwgnhDAAAAAAAAAAAQDkinAEAAAAAAAAAAChHhDMAAAAAAAAAAADliHAGAAAAAAAAAACgHBHOAAAAAAAAAAAAlCPCGQAAAAAAAAAAgHJEOAMAAAAAAAAAAFCOCGcAAAAAAAAAAADKEeEMAABAGZs3b54sy3L/i4iIUMOGDTVy5Ejt378/6MfLysrS5MmTtXr16qDvu7T27Nkjy7I0b948v5+7evVqWZalN95447RjJ0+eLMuyApjhqY8diue0pBYuXKhZs2ZV9DTCznfffafJkydrz549RR4bMWKEUlJSyn1OlY1lWZo8eXKFHLssr6clva64rmeHDh0K+hwCsW7dOk2ePFlHjx4t8thzzz0X0PU92PPo2bOnevbsWS7zAAAACBThDAAAQDmZO3eu1q9fr5UrV+rmm2/W66+/ru7duyszMzOox8nKytKUKVNCMkioX7++1q9frwEDBlT0VPzSqVMnrV+/Xp06daroqQSMcKZifPfdd5oyZYrPcGbixIl66623yn9SKLFQvp5WlHXr1mnKlCkhEc6cah7PPfdcucwDAAAgUBEVPQEAAIBwcc4556hLly6SpF69esnhcOiRRx7R0qVLNWTIkAqeXdlyOBwqKChQdHS0Lrjggoqejt+SkpIq5byl4x8ux8XFVfQ0qrRAz3GzZs3KYDY4lfz8fHcFI6qu1q1bV/QUAAAATovKGQAAgAri+rD/559/liRNmTJF559/vmrUqKGkpCR16tRJL730kowxXs/75JNP1LNnT9WsWVOxsbE688wzdc011ygrK0t79uxR7dq13ftztVIbMWKEzzn8/vvvioqK0sSJE4s8tmPHDlmWpX/84x/usbfddptat26thIQE1alTR5dcconWrl3r9TxX67IZM2bo0UcfVZMmTRQdHa1Vq1b5bGv2448/auTIkTr77LMVFxen5ORkDRw4UN9++63POefk5GjcuHGqV6+eYmNj1aNHD3311VenP+GSFi9erK5duyo+Pl4JCQnq27dviZ7rq/3QiBEjlJCQoB07dqhv376Kj49X/fr19dhjj0mSNmzYoIsuukjx8fFq3ry55s+f77VPV7u7lStXauTIkapRo4bi4+M1cOBA/fTTT0Xm8PLLL6t9+/aKiYlRjRo1dPXVV2v79u1eY1xz+vbbb9WnTx8lJibq0ksvVc+ePfX+++/r559/9mqx51LS915KSoquuOIKffjhh+rUqZNiY2PVsmVLvfzyy0Xmu3//fv3lL39Ro0aNFBUVpQYNGmjQoEH67bff3GPS0tI0fvx4NWnSRFFRUUpOTtaYMWNKXE12unMya9YsWZalH3/8sc
hz77vvPkVFRXm1ivroo4906aWXKikpSXFxcbrwwgv18ccfez3P1WLqyy+/1KBBg3TGGWcUG7LMmzdP1157raTjgazrvLve/77amlmWpTvuuENz585VixYtFBsbqy5dumjDhg0yxuiJJ55QkyZNlJCQoEsuucTnayvJ6/DF9T5/9dVXS/Q99s4776hr166Ki4tTYmKievfurfXr1/s8X9u2bdMNN9ygatWqqW7duho1apSOHTvmNTYtLU0333yzatasqYSEBF1++eX64YcffM51586dGjx4sOrUqaPo6Gi1atVKzz77rM/X88orr+juu+9WcnKyoqOj9eOPP5boelaS62lJ5iEdv55efvnliouLU61atXTLLbcoPT391F+QQvbt26c//elPSkpKUrVq1XTjjTfq999/dz8+evRo1ahRQ1lZWUWee8kll6hNmzan3P/KlSt15ZVXqmHDhoqJidFZZ52lv/71r17fI5MnT9Y999wjSWrSpIn7nKxevVopKSnatm2b1qxZ497u+f4u6fe763vglVdeUatWrRQXF6f27dvrvffeK9E8JN9tzY4cOaLbbrtNycnJioqKUtOmTTVhwgTl5ub6fXwAAICgMAAAAChTc+fONZLMpk2bvLbPnj3bSDIvvPCCMcaYESNGmJdeesmsXLnSrFy50jzyyCMmNjbWTJkyxf2c3bt3m5iYGNO7d2+zdOlSs3r1avPaa6+ZoUOHmj/++MPk5OSYDz/80Egyo0ePNuvXrzfr1683P/74Y7Hzu/rqq02jRo2Mw+Hw2n7vvfeaqKgoc+jQIWOMMTt27DC33nqrWbRokVm9erV57733zOjRo43NZjOrVq3ymqMkk5ycbHr16mXeeOMNs2LFCrN79273Y3PnznWPX7Nmjbn77rvNG2+8YdasWWPeeustc9VVV5nY2FizY8cO97hVq1YZSaZRo0bmyiuvNO+++6559dVXzVlnnWWSkpLMrl273GMnTZpkCv+o+/e//91YlmVGjRpl3nvvPbNkyRLTtWtXEx8fb7Zt23bKr6Hr2J6vc/jw4SYqKsq0atXKzJ4926xcudKMHDnSSDIPPPCAad68uXnppZfM8uXLzRVXXGEkmc2bN7uf73pfNGrUyIwaNcp88MEH5oUXXjB16tQxjRo1Mn/88Yd77LRp04wkc8MNN5j333/fLFiwwDRt2tRUq1bN/PDDD15zioyMNCkpKWb69Onm448/NsuXLzfbtm0zF154oalXr577PbF+/Xr380ry3jPGmMaNG5uGDRua1q1bmwULFpjly5eba6+91kgya9ascY/75ZdfTP369U2tWrXMzJkzzUcffWQWL15sRo0aZbZv326MMSYzM9N06NDBa8zs2bNNtWrVzCWXXGKcTucpvyYlOSe///67iYqKMhMmTPB6bkFBgWnQoIH505/+5N72yiuvGMuyzFVXXWWWLFli3n33XXPFFVcYu91uPvroI/c413urcePG5r777jMrV640S5cu9TnHgwcPuuf57LPPus/7wYMH3V+vxo0bez3Hte9u3bqZJUuWmLfeess0b97c1KhRw4wdO9ZceeWV5r333jOvvfaaqVu3rmnXrp3XuSrp6/DFn++x1157zUgyffr0MUuXLjWLFy82nTt3NlFRUWbt2rVFzleLFi3Mww8/bFauXGlmzpxpoqOjzciRI93jnE6n6dWrl4mOjjZ///vfzYoVK8ykSZNM06ZNjSQzadIk99ht27aZatWqmbZt25oFCxaYFStWmLvvvtvYbDYzefLkIq8nOTnZDBo0yLzzzjvmvffeM4cPHy7R9ex019OSzuPAgQOmTp06Jjk52cydO9csW7bMDBkyxJx55plFriu+eL7n7rnnHrN8+XIzc+ZMEx8fbzp27Gjy8vKMMcZ8/fXXRpJ58cUXvZ6/bds293vwVObMmWOmT59u3nnnHbNmzRozf/580759e9OiRQv3Mfbt22fuvPNOI8ksWbLEfU6OHTtmvv
zyS9O0aVPTsWNH9/Yvv/zSGOPf97skk5KSYs477zzzn//8xyxbtsz07NnTREREuN+Dp5qHMcb06NHD9OjRw73P7Oxs065dOxMfH2+efPJJs2LFCjNx4kQTERFh+vfv73UeSnJ8AACAYCCcAQAAKGOuD+E3bNhg8vPzTXp6unnvvfdM7dq1TWJiojlw4ECR5zgcDpOfn2+mTp1qatas6f7g6o033jCSzNatW4s93u+//17kw8xTeeedd4wks2LFCvc214fX11xzTbHPKygoMPn5+ebSSy81V199tXu7K4Bp1qyZ+wO9wo95hjO+9puXl2fOPvtsM3bsWPd21wetnTp18vogb8+ePSYyMtLcdNNN7m2Fw5m9e/eaiIgIc+edd3odKz093dSrV89cd911xc7H89iFwxlJ5s0333Rvy8/PN7Vr1zaS3B9KGmPM4cOHjd1uN+PGjXNvc70vPM+dMcZ8/vnnRpJ59NFHjTHG/PHHHyY2NrbIB4h79+410dHRZvDgwUXm9PLLLxd5DQMGDCgSBPhS3HvPmOPhTExMjPn555/d27Kzs02NGjXMX//6V/e2UaNGmcjISPPdd98Ve5zp06cbm81WJLR0vceXLVtW7HP9OSd/+tOfTMOGDb3Cx2XLlhlJ5t133zXGHP/guEaNGmbgwIFFzkX79u3Neeed597mem89/PDDxc7P03//+99iP4AvLpypV6+eycjIcG9bunSpkWQ6dOjg9fWYNWuWkWS++eYbv1+HLyX9HnM4HKZBgwambdu2Xuc1PT3d1KlTx3Tr1s29zXW+ZsyY4XWs2267zcTExLiP88EHHxhJZvbs2V7j/v73vxe5nvXt29c0bNjQ/UG8yx133GFiYmLMkSNHvF7PxRdffMrXbUzx17NTXU9LOo/77rvPWJZV5Lrdu3dvv8IZz+uhMScDsldffdW9rUePHqZDhw5e42699VaTlJRk0tPTT3kcT06n0+Tn55uff/7ZSDJvv/22+7EnnnjCSDK7d+8u8rw2bdp4hSIu/ny/SzJ169Y1aWlp7m0HDhwwNpvNTJ8+vUTzKBzO/Otf/zKSzH/+8x+vcY8//niR//+V9PgAAAClRVszAACAcnLBBRcoMjJSiYmJuuKKK1SvXj198MEHqlu3rqTj7couu+wyVatWTXa7XZGRkXr44Yd1+PBhHTx4UJLUoUMHRUVF6S9/+Yvmz5/vs/2Vv/r166d69epp7ty57m3Lly/Xr7/+qlGjRnmN/de//qVOnTopJiZGERERioyM1Mcff1ykvZYk/d///Z8iIyNPe/yCggJNmzZNrVu3VlRUlCIiIhQVFaWdO3f63O/gwYO9WnI1btxY3bp106pVq4o9xvLly1VQUKBhw4apoKDA/S8mJkY9evQIeLFvy7LUv39/9/2IiAidddZZql+/vjp27OjeXqNGDdWpU8fdws5T4fWGunXrpsaNG7tfz/r165WdnV2kNV2jRo10ySWX+GxXdc011/j1Okry3nPp0KGDzjzzTPf9mJgYNW/e3Ou1ffDBB+rVq5datWpV7DHfe+89nXPOOerQoYPX16Rv375FWsgV5s85GTlypH755Rd99NFH7m1z585VvXr11K9fP0nHFxY/cuSIhg8f7jUXp9Opyy+/XJs2bSrSesnfc+yPXr16KT4+3n3fdR779evn9d53bXed+0Behy+n+x77/vvv9euvv2ro0KGy2U7+SpmQkKBrrrlGGzZsKNJa6//+7/+87rdr1045OTnu95dr34W/HwYPHux1PycnRx9//LGuvvpqxcXFeb3O/v37KycnRxs2bPB6TnFfK3+uZ4X5M49Vq1apTZs2at++/Slf2+kUPjfXXXedIiIivK59f/vb37R161Z9/vnnko63EnvllVc0fPhwJSQknHL/Bw8e1C233KJGjRq5z0fjxo0lqUTn5FT8/X7v1auXEhMT3ffr1q1b7DW0JD755BPFx8dr0KBBXttd15DC19FgHx8AAMAXwh
kAAIBysmDBAm3atElfffWVfv31V33zzTe68MILJUkbN25Unz59JEkvvviiPv/8c23atEkTJkyQJGVnZ0s6voD4Rx99pDp16uj2229Xs2bN1KxZM82ePTvgeUVERGjo0KF66623dPToUUnH18qoX7+++vbt6x43c+ZM3XrrrTr//PP15ptvasOGDdq0aZMuv/xy9/w81a9fv0THHzdunCZOnKirrrpK7777rr744gtt2rRJ7du397nfevXq+dx2+PDhYo/hWufk3HPPVWRkpNe/xYsXe62p4I+4uDjFxMR4bYuKilKNGjWKjI2KilJOTo7Pufva5no9rv/6Op8NGjQo8rrj4uKUlJRU4tdQ0veeS82aNYvsIzo62mvc77//roYNG57yuL/99pu++eabIl+PxMREGWNO+TXx55z069dP9evXd4ePf/zxh9555x0NGzZMdrvdPRdJGjRoUJH5PP744zLG6MiRI17HKen7OxCF3z9RUVGn3O56XwXyOnwp7XvS6XTqjz/+8Npe+H0THR0t6eT76/Dhw4qIiCgyrvBcDh8+rIKCAj3zzDNFXqMrKC383vE1T3+vZ4X5M4/Dhw8Xe079UXi863x5vt+vvPJKpaSkuNe9mTdvnjIzM3X77befct9Op1N9+vTRkiVLdO+99+rjjz/Wxo0b3QFTSc7Jqfj7/V6S64w/XF8Dz9BRkurUqaOIiIgi19FgHx8AAMCXiIqeAAAAQLho1aqVunTp4vOxRYsWKTIyUu+9957Xh/1Lly4tMrZ79+7q3r27HA6HNm/erGeeeUZjxoxR3bp19ec//zmguY0cOVJPPPGEFi1apOuvv17vvPOOxowZ4/7wWpJeffVV9ezZU3PmzPF6bnGLWhf+EKw4r776qoYNG6Zp06Z5bT906JCqV69eZPyBAwd8bvP1YZpLrVq1JElvvPGG+y/BQ0Vxr+ess86SdPJDwtTU1CLjfv31V/drcynpeXfx571XUrVr19Yvv/xyyjG1atVSbGysXn755WIfL44/58Rut2vo0KH6xz/+oaNHj2rhwoXKzc3VyJEjixzrmWee0QUXXODzmK4KNxd/z3N5COR1+HK677HTnX+bzaYzzjijxPN27bOgoECHDx/2+l4uPJczzjjD/TUtLnBo0qSJ131fXyt/r2eF+TOPmjVrFntO/XHgwAElJye77/s6XzabTbfffrsefPBBPfXUU3ruued06aWXqkWLFqfc9//+9z99/fXXmjdvnoYPH+7e/uOPP/o1x+KU5vs9GGrWrKkvvvhCxhiv98PBgwdVUFBQ5scHAADwhcoZAACAEGBZliIiIrzCkOzsbL3yyivFPsdut+v88893/4X0l19+KanoX6SXRKtWrXT++edr7ty5Pj+8ds3RtW+Xb775RuvXry/xcXzxtd/3339f+/fv9zn+9ddflzHGff/nn3/WunXr1LNnz2KP0bdvX0VERGjXrl3q0qWLz38V5bXXXvO6v27dOv3888/u19O1a1fFxsbq1Vdf9Rr3yy+/6JNPPtGll15aouMU91ffgbz3Tqdfv35atWqVvv/++2LHXHHFFdq1a5dq1qzp8+uRkpJS7HP9PScjR45UTk6OXn/9dc2bN09du3ZVy5Yt3Y9feOGFql69ur777rti3x+uKhV/BfL9GKhgvY7TfY+1aNFCycnJWrhwode4zMxMvfnmm+ratavi4uL8mnuvXr0kFf1+WLhwodf9uLg49erVS1999ZXatWvn8zWeKqh1Ken1rLivnz/z6NWrl7Zt26avv/76lK/tdAqfm//85z8qKCgocu276aabFBUVpSFDhuj777/XHXfccdp9uwKLwufk+eefLzL2VO/p4q4zpfl+L44/31uXXnqpMjIyioTOCxYscD8OAABQ3qicAQAACAEDBgzQzJkzNXjwYP3lL3/R4cOH9eSTTxb5oOxf//qXPvnkEw0YMEBnnnmmcnJy3H+JfNlll0mSEhMT1bhxY7399tu69N
JLVaNGDdWqVeu0H36NGjVKf/3rX/Xrr7+qW7duRf7S+oorrtAjjzyiSZMmqUePHvr+++81depUNWnSRAUFBQG/9iuuuELz5s1Ty5Yt1a5dO23ZskVPPPFEsW2xDh48qKuvvlo333yzjh07pkmTJikmJkYPPPBAscdISUnR1KlTNWHCBP3000+6/PLLdcYZZ+i3337Txo0bFR8frylTpgT8Gkpj8+bNuummm3Tttddq3759mjBhgpKTk3XbbbdJkqpXr66JEyfqwQcf1LBhw3TDDTfo8OHDmjJlimJiYjRp0qQSHadt27ZasmSJ5syZo86dO8tms6lLly4lfu/5Y+rUqfrggw908cUX68EHH1Tbtm119OhRffjhhxo3bpxatmypMWPG6M0339TFF1+ssWPHql27dnI6ndq7d69WrFihu+++W+eff77P/ft7Tlq2bKmuXbtq+vTp2rdvn1544QWvxxMSEvTMM89o+PDhOnLkiAYNGqQ6dero999/19dff63ff/+9SIVFSZ1zzjmSpBdeeEGJiYmKiYlRkyZNShQg+CtYr+N032M2m00zZszQkCFDdMUVV+ivf/2rcnNz9cQTT+jo0aN67LHH/J57nz59dPHFF+vee+9VZmamunTpos8//9xnSDh79mxddNFF6t69u2699ValpKQoPT1dP/74o95991198sknpz1eSa9np7qelnQeY8aM0csvv6wBAwbo0UcfVd26dfXaa69px44dfp2jJUuWKCIiQr1799a2bds0ceJEtW/fXtddd53XuOrVq2vYsGGaM2eOGjdurIEDB5523y1btlSzZs10//33yxijGjVq6N1339XKlSuLjG3btq2k41+H4cOHKzIyUi1atFBiYqLatm2rRYsWafHixWratKliYmLUtm3bUn2/F+dU8yhs2LBhevbZZzV8+HDt2bNHbdu21WeffaZp06apf//+7v9/AgAAlCsDAACAMjV37lwjyWzatOmU415++WXTokULEx0dbZo2bWqmT59uXnrpJSPJ7N692xhjzPr1683VV19tGjdubKKjo03NmjVNjx49zDvvvOO1r48++sh07NjRREdHG0lm+PDhp53nsWPHTGxsrJFkXnzxxSKP5+bmmvHjx5vk5GQTExNjOnXqZJYuXWqGDx9uGjdu7B63e/duI8k88cQTRfbhemzu3LnubX/88YcZPXq0qVOnjomLizMXXXSRWbt2renRo4fp0aOHe9yqVauMJPPKK6+Yu+66y9SuXdtER0eb7t27m82bN3sdZ9KkScbXj7pLly41vXr1MklJSSY6Oto0btzYDBo0yHz00UenPDeuY69atcq9bfjw4SY+Pr7I2B49epg2bdoU2d64cWMzYMAA933X+2LFihVm6NChpnr16iY2Ntb079/f7Ny5s8jz//3vf5t27dqZqKgoU61aNXPllVeabdu2eY0pbk7GGHPkyBEzaNAgU716dWNZltf5Kcl7z9dr8HzNnl8rY4zZt2+fGTVqlKlXr56JjIw0DRo0MNddd5357bff3GMyMjLMQw89ZFq0aOF+XW3btjVjx441Bw4c8Pk6/D0nLi+88IKRZGJjY82xY8d8jlmzZo0ZMGCAqVGjhomMjDTJyclmwIAB5r///a97jOu99fvvv592fi6zZs0yTZo0MXa73ev9X/h7xxhjJJnbb7/da1tx31Ou96Xn/Er6Onzx53vMmOPfT+eff76JiYkx8fHx5tJLLzWff/6515jizpfr/e/5/jp69KgZNWqUqV69uomLizO9e/c2O3bsMJLMpEmTipyTUaNGmeTkZBMZGWlq165tunXrZh599NHTnh9jSn49M+bU19OSzMMYY7777jvTu3dvExMTY2rUqGFGjx5t3n777SLXFV9c53DLli1m4MCBJiEhwSQmJpobbrjB6/vJ0+rVq40k89hjj51y377mmJiYaM444wxz7bXXmr179/o8/w888IBp0KCBsdlsXq9hz549pk+fPi
YxMdFI8jqXJf1+9/U9YMzx60/h/5cVNw9f16TDhw+bW265xdSvX99ERESYxo0bmwceeMDk5OR4jfPn+AAAAKVhGeNRhw4AAACgXMybN08jR47Upk2bKrStGuCyevVq9erVS//97381aNCgip4OSuHuu+/WnDlztG/fvjKp0gIAAEDp0dYMAAAAAIAqYMOGDfrhhx/03HPP6a9//SvBDAAAQAgjnAEAAAAAoAro2rWr4uLidMUVV+jRRx+t6OkAAADgFGhrBgAAAAAAAAAAUI5sFT0BAAAAAAAAAACAcEI4AwAAAAAAAAAAUI4IZwAAAAAAAAAAAMpRREVPoLJyOp369ddflZiYKMuyKno6AAAAAAAAAACgAhljlJ6ergYNGshmO3VtDOFMgH799Vc1atSooqcBAAAAAAAAAABCyL59+9SwYcNTjiGcCVBiYqKk4yc5KSmp1PvLz8/XihUr1KdPH0VGRpZ6fwAQLFyfAIQqrk8AQhXXJwChiusTgFBVVa5PaWlpatSokTs/OBXCmQC5WpklJSUFLZyJi4tTUlJSpX7zAah6uD4BCFVcnwCEKq5PAEIV1ycAoaqqXZ9KshTKqZueAQAAAAAAAAAAIKgIZwAAAAAAAAAAAMoR4QwAAAAAAAAAAEA5Ys0ZAAAAAAAAAEDIM8aooKBADoejoqeCIMvPz1dERIRycnJC+utrt9sVERFRojVlTodwBgAAAAAAAAAQ0vLy8pSamqqsrKyKngrKgDFG9erV0759+4ISfJSluLg41a9fX1FRUaXaD+EMAAAAAAAAACBkOZ1O7d69W3a7XQ0aNFBUVFTIf4AP/zidTmVkZCghIUE2W2iuxmKMUV5enn7//Xft3r1bZ599dqnmSjgDAAAAAAAAAAhZeXl5cjqdatSokeLi4ip6OigDTqdTeXl5iomJCdlwRpJiY2MVGRmpn3/+2T3fQIXuqwQAAAAAAAAA4IRQ/tAe4SNY70PezQAAAAAAAAAAAOWIcAYAAAAAAAAAAKAcEc4AAAAAAAAAAMKCw+HQ6tWr9frrr2v16tVyOBwVPSWUgmVZWrp0aUVPIyCEMwAAAAAAAACAKm/JkiVKSUlRr169NHjwYPXq1UspKSlasmRJRU+t0unZs6fGjBnjtW316tWyLEtHjx4N+vEmT56sDh06FNmempqqfv36Bf145YFwBgAAAAAAAABQpS1ZskSDBg3SL7/84rV9//79GjRoEAFNJVWvXj1FR0dX9DQCQjgDAAAAAAAAAKiyHA6H/va3v8kYU+Qx17YxY8aUSYuznj176s4779SYMWN0xhlnqG7dunrhhReUmZmpkSNHKjExUc2aNdMHH3zgfs53332n/v37KyEhQXXr1tXQoUN16NAh9+MffvihLrroIlWvXl01a9bUFVdcoV27drkf37NnjyzL0pIlS9SrVy/FxcWpffv2Wr9+fYnmfPjwYd1www1q2LCh4uLi1LZtW73++uvux0eMGKE1a9Zo9uzZsixLlmVpz5496tWrlyTpjDPOkGVZGjFihKTj53jGjBlq2rSpYmNj1b59e73xxhvu/a1evVp2u11r1qzReeedp7i4OHXr1k3ff/+9JGnevHmaMmWKvv76a/fx5s2bJ6loW7Nvv/1Wl1xyiWJjY1WzZk395S9/UUZGhtfcr7rqKj355JOqX7++atasqdtvv135+fklOjfBRDgDAAAAAAAAAKh0unTpooYNG572X7169YpUzHgyxmjfvn2qV69eifbXpUsXv+Y5f/581apVSxs3btSdd96pW2+9Vddee626deumL7/8Un379tXQoUOVlZWl1NRU9ejRQx06dNDmzZv14Ycf6rffftN1113n3l9mZqbGjRunTZs26eOPP5bNZtPVV18tp9PpddwJEyZo/Pjx2rp1q5o3b64bbrhBBQUFp51vTk6OOnfurPfee0//+9//9Je//EVDhw7VF198IUmaPXu2unbtqptvvlmpqalKTU1Vo0aN9O
abb0qSvv/+e6Wmpmr27NmSpIceekhz587VnDlztG3bNo0dO1Y33nij1qxZ43XcRx99VE888YQ2b96siIgIjRo1SpJ0/fXX6+6771abNm3cx7v++uuLzDsrK0uXX365zjjjDG3atEn//e9/9dFHH+mOO+7wGrdq1Srt2rVLq1at0vz58zVv3jx32FOeIsr9iAAAAAAAAAAAlNKBAwe0f//+oO3PszolmNq3b6+HHnpIkvTAAw/oscceU61atXTzzTdLkh5++GHNmTNH33zzjZYtW6ZOnTpp2rRp7ue//PLLatSokX744Qc1b95c11xzjdf+X3rpJdWpU0ffffedzjnnHPf28ePHa8CAAZKkKVOmqE2bNvrxxx/VsmXLU843OTlZ48ePd9+/88479eGHH+q///2vzj//fFWrVk1RUVGKi4tTvXr13ONq1KghSapTp46qV68u6XiQNHPmTH3yySfq2rWrJKlp06b67LPP9Pzzz6tHjx7u5z/00EPq0aOHbDab7r//fg0YMEA5OTmKjY1VQkKCIiIivI5X2Guvvabs7GwtWLBA8fHxkqR//vOfGjhwoB5//HHVrVtX0vHKnn/+85+y2+1q2bKlBgwYoI8//tj99SgvhDMAAAAAAAAAgErnVB/Ue8rNzS1R8FKrVq0SrV9S0uO6tGvXzn3bbrerZs2aatu2rXubKzQ4ePCgtmzZolWrVikhIaHIfnbt2qXmzZtr165dmjhxojZs2KBDhw65K2b27t3rFc54Hrd+/fruY5wunHE4HHrssce0ePFi7d+/X7m5ucrNzXUHHv747rvvlJOTo969e3ttz8vLU8eOHb22tWnTxud8zzzzzBIda/v27Wrfvr3XPC+88EI5nU59//337vPcpk0b2e12r2N9++23/r2wICCcAQAAAAAAAABUOps3by7ROIfDoZSUFO3fv9/nujOWZalhw4bavXu314f2wRIZGVnkeJ7bLMuSJDmdTjmdTnelR2GuwGLgwIFq1KiRXnzxRTVo0EBOp1PnnHOO8vLyij2u5zFO56mnntLTTz+tWbNmqW3btoqPj9eYMWOK7L8kXMd7//33lZyc7PVY4SAs0Pm6GGPczyvMc7uvr4c/xwkWwhkAAAAAAAAAQJVlt9s1e/ZsDRo0SJZleQU0rg/tZ82aVSbBjL86deqkN998UykpKYqIKPrx/eHDh7V9+3Y9//zz6t69uyTps88+C+oc1q5dqyuvvFI33nijpOMByc6dO9WqVSv3mKioKDkcDq/nRUVFSZLX9tatWys6Olp79+71amHmL1/HK6x169aaP3++MjMz3dUzn3/+uWw2m5o3bx7wscuKraInAAAAAAAAAABAWfrTn/6kN954o0j1RsOGDfXGG2/oT3/6UwXNzNvtt9+uI0eO6IYbbtDGjRv1008/acWKFRo1apQcDofOOOMM1axZUy+88IJ+/PFHffLJJxo3blxQ53DWWWdp5cqVWrdunbZv366//vWvOnDggNeYlJQUffHFF9qzZ4+7tVrjxo1lWZbee+89/f7778rIyFBiYqLGjx+vsWPHav78+dq1a5e++uorPfvss5o/f36J55SSkqLdu3dr69atOnTokHJzc4uMGTJkiGJiYjR8+HD973//06pVq3TnnXdq6NCh7pZmoYRwBgAAAAAAAABQ5f3pT3/Snj17tGrVKi1cuFCrVq3S7t27QyaYkaQGDRro888/l8PhUN++fXXOOefob3/7m6pVqyabzSabzaZFixZpy5YtOuecczR27Fg98cQTQZ3DxIkT1alTJ/Xt21c9e/ZUvXr1dNVVV3mNGT9+vOx2u1q3bq3atWtr7969Sk5O1pQpU3T//ferbt26uuOOOyRJjzzyiB5++GFNnz5drVq1Ut++ffXuu++qSZMmJZ7TNddco8svv1y9evVS7dq19frrrxcZExcXp+XLl+vIkSM699xzNWjQIF166aX65z//WarzUVYs46vJHk4rLS1N1apV07
Fjx5SUlFTq/eXn52vZsmXq379/kZ53AFCRuD4BCFVcnwCEKq5PAEIV1ydUVjk5Odq9e7eaNGmimJiYip4OyoDT6VRaWpqSkpJks4V2Tcmp3o/+5Aah/SoBAAAAAAAAAACqGMIZAAAAAAAAAADCRL9+/ZSQkODz37Rp0yp6emEjoqInAAAAAAAAAAAAyse///1vZWdn+3ysRo0a5Tyb8EU4AwAAAAAAAABAmEhOTq7oKUC0NQMAAAAAAAAAAChXhDMAAAAAAAAAAADliHAGAAAAAIAgcTgcWrNmjT799FOtWbNGDoejoqcEAACAEEQ4AwAAAABAECxZskQpKSnq3bu3Zs6cqd69eyslJUVLliyp6KkBAAAgxBDOAAAAAABQSkuWLNGgQYP0yy+/eG3fv3+/Bg0aREADAAAAL4QzAAAAAACUgsPh0N/+9jcZY4o85to2ZswYWpwBAIByN3nyZHXo0KGipwEfIip6AgAAAAAAVGZr164tUjHjyRijffv2ae3aterZs2f5TQwAgDCQcv/75Xq8PY8N8Gt8z5491aFDB82aNatsJoRKi3AGAAAAAAA/bG/Zyuv+5rS0Ej1v8+AhqpuU5L7fasf2oM4LAABUPsYYORwORUTwUX24oa0ZAAAAAAClUDvCHtRxAACgahgxYoTWrFmj2bNny7IsWZalefPmybIsLV++XF26dFF0dLTWrl0rY4xmzJihpk2bKjY2Vu3bt9cbb7zh3tfq1atlWZY+/vhjdenSRXFxcerWrZu+//57r2M+9thjqlu3rhITEzV69Gjl5OSU98tGCRHOAAAAAABQCp1j41Q3IkJWMY9bkupFRKhzbFx5TgsAAFSw2bNnq2vXrrr55puVmpqq1NRUNWrUSJJ07733avr06dq+fbvatWunhx56SHPnztWcOXO0bds2jR07VjfeeKPWrFnjtc8JEyboqaee0ubNmxUREaFRo0a5H/vPf/6jSZMm6e9//7s2b96s+vXr67nnnivX14ySo1YKAAAAAIBSsFuWHqxTV2N+3V/kMVdg80CdurJbxcU3AACgKqpWrZqioqIUFxenevXqSZJ27NghSZo6dap69+4tScrMzNTMmTP1ySefqGvXrpKkpk2b6rPPPtPzzz+vHj16uPf597//3X3//vvv14ABA5STk6OYmBjNmjVLo0aN0k033SRJevTRR/XRRx9RPROiqJwBAAAAAKCUeicmalaDZNW0e7cuqxsRoVkNktU7MbGCZgYAAEJRly5d3Le/++475eTkqHfv3kpISHD/W7BggXbt2uX1vHbt2rlv169fX5J08OBBSdL27dvd4Y5L4fsIHVTOAAAAAAAQBL0TE1XNZtOIX/ZJOv7XkCuaNFWEjb+LBAAA3uLj4923nU6nJOn9999XcnKy17jo6Giv+5GRke7b1omqXNfzUbkQzgAAAAAAECQ5xrhvOyU5xC/eAACEs6ioKDkcjlOOad26taKjo7V3716vFmb+atWqlTZs2KBhw4a5t23YsCHg/aFs8TMiAAAAAABBkm2che4bRRczFgAAVH0pKSn64osvtGfPHiUkJPiscklMTNT48eM1duxYOZ1OXXTRRUpLS9O6deuUkJCg4cOHl+hYf/vb3zR8+HB16dJFF110kV577TVt27ZNTZs2DfbLQhAQzgAAAAAAECTZTuN1P8vpVPVC69AAAIDg2fPYgIqewimNHz9ew4cPV+vWrZWdna25c+f6HPfII4+oTp06mj59un766SdVr15dnTp10oMPPljiY11//fXatWuX7rvvPuXk5Oiaa67RrbfequXLlwfr5SCICGcAAAAAAAiSnEJ/DZtFD3gAAMJa8+bNtX79eq9tI0aMKDLOsizddddduuuuu3zup2fPnjLG+49AOnToUGTbgw8+WCTQefzxxwOYOcoaqxICAAAAABAkWYXbmhHOAAAAwAfCGQAAAAAAgqRIWzNDOAMAAICiCGcAAAAAAAiSnCKVM6aYkQAAAA
hnhDMAAAAAAARJ4TZmrDkDAAAAXwhnAAAAAAAIkqxClTKsOQMAAABfCGcAAAAAAAiS7EJtzVhzBgAAAL4QzgAAAAAAECQ5tDUDAABACRDOAAAAAAAQJNmF2poVbnMGAAAASCEQzjz33HNq0qSJYmJi1LlzZ61du7bYsUuWLFHv3r1Vu3ZtJSUlqWvXrlq+fHmRcW+++aZat26t6OhotW7dWm+99VapjgsAAAAAQEkUbmvGmjMAAADwpULDmcWLF2vMmDGaMGGCvvrqK3Xv3l39+vXT3r17fY7/9NNP1bt3by1btkxbtmxRr169NHDgQH311VfuMevXr9f111+voUOH6uuvv9bQoUN13XXX6Ysvvgj4uAAAAAAAlEThNmaFwxoAAICSmDx5sizLkmVZmjVrlnu7ZVlaunRphc3rVHr27Ome89atWyt6OiEvoiIPPnPmTI0ePVo33XSTJGnWrFlavny55syZo+nTpxcZ7/kmlKRp06bp7bff1rvvvquOHTu6x/Tu3VsPPPCAJOmBBx7QmjVrNGvWLL3++usBHRcAAAAAgJLIMYXbmhHOAABQpiZXK+fjHSu3Q7Vp00YfffSRkpKSyvQ427Zt08MPP6wtW7bo559/1tNPP60xY8Z4jZk+fbqWLFmiHTt2KDY2Vt26ddPjjz+uFi1auMcsWbJEu3bt0nnnnVem860qKiycycvL05YtW3T//fd7be/Tp4/WrVtXon04nU6lp6erRo0a7m3r16/X2LFjvcb17dvXHewEetzc3Fzl5ua676elpUmS8vPzlZ+fX6L5noprH8HYFwAEE9cnAKGK6xOAiuKIji72sSJrzsgqdjzXLwDljZ+fUFnl5+fLGCOn0ylnoT98KO/WUIWPX1aMMYqIiFCdOnWKHNfXeSiNjIwMNWnSRNdcc43uvvtu97n2tHr1at16660699xzVVBQoIceekh9+vTR//73P8XHx0uSqlevrpo1awY0R3PiD1x8HTvUOJ1OGWOUn58vu93u9Zg/19cKC2cOHTokh8OhunXrem2vW7euDhw4UKJ9PPXUU8rMzNR1113n3nbgwIFT7jPQ406fPl1Tpkwpsn3FihWKi4sr0XxLYuXKlUHbFwAEE9cnAKGK6xOAcje16O+GLpnXXy85Ctz3Dzc+U7uKGb9r2bKgTw0ASoKfn1DZREREqF69esrIyFBeXp7XY9XLeS6uP9oviUWLFunBBx/U9u3bFe3xxxrDhg1TXFyc/vWvfxX73NzcXDkcDp/Hy87Odm+fNGmS3n//ff3666+qU6eOrr32Wt17772KjIx0j3/yySf1/PPPKycnR1dffbVq1Kihjz/+2L0Oe4sWLfTQQw9Jku6//37l5OQUOe7ixYu97s+ePVtnn322Pv30U1144YXu7RkZGZKkzMxMv86VS3p6ut/PKW95eXnKzs7Wp59+qoKCAq/HsrKySryfCm1rJh3vkefJGFNkmy+vv/66Jk+erLffftudHvqzT3+P+8ADD2jcuHHu+2lpaWrUqJH69OkTlLKy/Px8rVy5Ur179/b6xgGAisb1CUCo4voEoKJ83+Vcn9uNMV4dFyRJu3ap2cOTfI5vsXlTsKcGAKfEz0+orHJycrRv3z4lJCQoJiamQufiz2fBQ4cO1f3336/Vq1fr2muvlXS8eGD58uVatmzZKfcVHR0tu93uc0xsbKx7e61atTRv3jw1aNBA3377rf7617+qVq1auueeeyRJr732mp566in985//1IUXXqjFixdr5syZatKkic9922w2xcTEnPZ1Hjx4UJLUqFEjr7EJCQmSpPj4eL/OlTFG6enpSkxMLFE+UJFycnIUGxuriy++uMj70Z9AqsLCmVq1aslutxepVjl48GCRqpbCFi9erNGjR+u///2vLrvsMq/H6tWrd8p9Bnrc6Ohor3TTJTIyMqj/Mwv2/gAgWLg+AQhVXJ8AlDd74QDmhGwfLTiyHI5ix3PtAlBR+PkJlY3D4ZBlWb
LZbLLZyruRmTd/jh8fH6/Bgwdr/vz5uv766yUdLzpo2LChLrnkklOGEK7HfB3P8zxMnDjRvb1p06b64YcftHjxYt13332SpGeffVajR4/W6NGjJR2vtFm5cqUyMjKKfS2uc10cY4zGjx+viy66SO3atSsyt8JzLAlXK7PTHTsU2Gw2WZbl81rqz7W1wl5lVFSUOnfuXKSMcuXKlerWrVuxz3v99dc1YsQILVy4UAMGDCjyeNeuXYvsc8WKFe59BnpcAAAAAABOxVc442sbAAAIHzfffLNWrFih/fv3S5Lmzp2rESNGBK065I033tBFF12kevXqKSEhQRMnTtTevXvdj3///fc677zzvJ5T+L6/7rjjDn3zzTd6/fXXS7WfcFehbc3GjRunoUOHqkuXLuratateeOEF7d27V7fccouk463E9u/frwULFkg6HswMGzZMs2fP1gUXXOCufomNjVW1atUkSX/729908cUX6/HHH9eVV16pt99+Wx999JE+++yzEh8XAAAAAAB/ZRsflTOEMwAAhLWOHTuqffv2WrBggfr27atvv/1W7777blD2vWHDBv35z3/WlClT1LdvX1WrVk2LFi3SU0895TXO1xIfgbrzzjv1zjvv6NNPP1XDhg0D3g8qOJy5/vrrdfjwYU2dOlWpqak655xztGzZMjVu3FiSlJqa6pXyPf/88yooKNDtt9+u22+/3b19+PDhmjdvniSpW7duWrRokR566CFNnDhRzZo10+LFi3X++eeX+LgAAAAAAPgr21n0g47sUnz4AQAAqoabbrpJTz/9tPbv36/LLrtMjRo1Csp+P//8czVu3FgTJkxwb/v555+9xrRo0UIbN27U0KFD3ds2b97s97GMMbrzzjv11ltvafXq1WrSpEngE4ekCg5nJOm2227Tbbfd5vMxV+Disnr16hLtc9CgQRo0aFDAxwUAAAAAwF++WpjlGaMCYxQR4gvbAgCAsjNkyBCNHz9eL774ortLVDCcddZZ2rt3rxYtWqRzzz1X77//vt566y2vMXfeeaduvvlmdenSRd26ddPixYv1zTffqGnTpu4xeXl5+u6779y39+/fr61btyohIUFnnXWWJOn222/XwoUL9fbbbysxMdHd1apatWqKjY0N2msKJxUezgAAAAAAUBX4amsmHQ9tEu32cp4NAABhYvKxip7BaSUlJemaa67R+++/r6uuuipo+73yyis1duxY3XHHHcrNzdWAAQM0ceJETZ482T1myJAh+umnnzR+/Hjl5OTouuuu04gRI7Rx40b3mF9//VUdO3Z033/yySf15JNPqkePHu6CiTlz5kiSevbs6TUH1xo68B/hDAAAAAAAQeCrrZl0fN0ZwhkAAMJbamqqhgwZoujo6FLtp/B6MTNmzNCMGTO8to0ZM8br/sSJEzVx4kT3/d69e7srYiQpJSXltOvQlGadGvhmq+gJAAAAAABQFRRbOcOHGQAAhK0jR45o0aJF+uSTT7zWUS+Jb7/9VgkJCXruuecCPn5WVpZmzpypbdu2aceOHZo0aZI++ugjDR8+POB9Fqdfv35q06ZN0PdbVVE5AwAAAABAEHiuOWOz2eQ8cT/Lx1o0AAAgPHTq1El//PGHHn/8cbVo0cK9vU2bNvr55599Puf555/XXXfdpRtvvFGSVLt27YCPb1mWli1bpkcffVS5ublq0aKF3nzzTV122WUB77M4//73v5WdnS1JOvPMM4O+/6qGcAYAAAAAgCDwbGuWmJioY8eO98AnnAEAIHzt2bPH5/Zly5YpPz/f52N169ZVYmKiatSoUerjx8bG6qOPPir1fkoiOTm5XI5TVRDOAAAAAAAQBJ5tzapVq+YOZ4prdwYAAMJX48aNK3oKqGCsOQMAAAAAQBB4tjWrVq2a+zaVMwAAACiMcAYAAAAAgCDwbGtWvXp1n9sBAAAAiXAGAAAAAICgKNzWzIXKGQAAABRGOAMAAAAAQBDkFNPWLJtwBgAAAI
UQzgAAAAAAEARZxbQ1yzKEMwAAAPBGOAMAAAAAQBDQ1gwAAISyyZMnq0OHDhU9DZwQUdETAAAAAACgKsgupq2ZZ0UNAAAIrrbz25br8b4d/m25Hi9QlmXprbfe0lVXXVXRU0ExqJwBAAAAACAIcszxEMYmKTEx0b2dNWcAAABQGOEMAAAAAABB4GpfFmuzKSYm5uR21pwBACBs9ezZU3feeafGjBmjM844Q3Xr1tULL7ygzMxMjRw5UomJiWrWrJk++OAD93PWrFmj8847T9HR0apfv77uv/9+FRQUeO3zrrvu0r333qsaNWqoXr16mjx5svvxlJQUSdLVV18ty7Lc911eeeUVpaSkqFq1avrzn/+s9PT0sjwFKAbhDAAAAAAAQeCqkIkpFM5QOQMAQHibP3++atWqpY0bN+rOO+/UrbfeqmuvvVbdunXTl19+qb59+2ro0KHKysrS/v371b9/f5177rn6+uuvNWfOHL300kt69NFHi+wzPj5eX3zxhWbMmKGpU6dq5cqVkqRNmzZJkubOnavU1FT3fUnatWuXli5dqvfee0/vvfee1qxZo8cee6z8TgbcCGcAAAAAAAiC7BNtzWJtNkVHR5/czpozAACEtfbt2+uhhx7S2WefrQceeECxsbGqVauWbr75Zp199tl6+OGHdfjwYX3zzTd67rnn1KhRI/3zn/9Uy5YtddVVV2nKlCl66qmn5PT4g4927dpp0qRJOvvsszVs2DB16dJFH3/8sSSpdu3akqTq1aurXr167vuS5HQ6NW/ePJ1zzjnq3r27hg4d6n4eyhfhDAAAAAAAQZDj0dbMbrcr2rIknWx3BgAAwlO7du3ct+12u2rWrKm2bdu6t9WtW1eSdPDgQW3fvl1du3aVdeLnCEm68MILlZGRoV9++cXnPiWpfv36Onjw4GnnkpKS4rU2Xkmfh+AjnAEAAAAAoJScxijHo3JGkuJO/DebNWcAAAhrkZGRXvcty/La5gpinE6njDFewYwkmRM/Y3hu97VPZwn+ICTQ5yH4CGcAAAAAACglV0sz6WQ44/ovlTMAAKCkWrdurXXr1rkDGUlat26dEhMTlZycXOL9REZGyuFwlMUUESSEMwAAAAAAlFKORwATaxHOAACAwNx2223at2+f7rzzTu3YsUNvv/22Jk2apHHjxslmK/nH+SkpKfr444914MAB/fHHH2U4YwQqoqInAAAAAABAZZftGc6425rZjz9mjJzGyFaoRQkAACi9b4d/W9FTCKrk5GQtW7ZM99xzj9q3b68aNWpo9OjReuihh/zaz1NPPaVx48bpxRdfVHJysvbs2VM2E0bACGcAAAAAACglz7ZmMYXamklSjjGKI5wBACDsrF69usg2X0GJZxuzHj16aOPGjX7tc+nSpV73Bw4cqIEDB3ptmzx5siZPnuy1bcyYMRozZkyxx0LZoa0ZAAAAAACllOWzcsbm83EAAACAcAYAAAAAgFLKMUXDGc/KmWzCGQAAAHggnAEAAAAAoJSynSdbkbgrZ+xUzgAAAMA3whkAAAAAAErJszLG15oznmvSAAAAAIQzAAAAAACUUtZp2ppROQMAAABPhDMAAAAAAJRSjq+2ZoQzAAAAKAbhDAAAAAAApeTZ1izOZ1szwhkAAACcRDgDAAAAAEApeYYvMTZLEpUzAAAAKB7hDAAAAAAApZTto61ZrM3u83EAAACAcAYAAAAAgFLybGvGmjMAAKCkVq9eLcuydPTo0YqeSlBUtddTliIqegIAAAAAAFR2Xm3NLJucYs0ZAADKw/aWrcr1eK12bC/X41Vl8+bN05gxY0IqyCnPOVE5AwAAAABAKVE5AwAAKou8vLyKnkJQGWNUUFBQ0dPwG+EMAAAAAACllG18rTnjUTlDOAMAQNjKzc3VXXfdpTp16igmJkYXXXSRNm3a5DXm888/V/v27RUTE6Pzzz9f3377rfuxn3/+WQMHDtQZZ5yh+Ph4tWnTRsuWLXM//t1336
l///5KSEhQ3bp1NXToUB06dMj9eM+ePXXHHXdo3LhxqlWrlnr37q0bbrhBf/7zn73mkJ+fr1q1amnu3LmSjoceM2bMUNOmTRUbG6v27dvrjTfe8HrOsmXL1Lx5c8XGxqpXr17as2dPic7J6tWrNXLkSB07dkyWZclut+uxxx6TJL366qvq0qWLEhMTVa9ePQ0ePFgHDx70eq5lWVq+fLm6dOmi6OhorV27Vunp6RoyZIji4+NVv359Pf300+rZs6fGjBnjfm5eXp7uvfdeJScnKz4+Xueff75Wr17tc06WZWny5Mklej2BIJwBAAAAAKCUPMOXOJ+VM6bIcwAAQHi499579eabb2r+/Pn68ssvddZZZ6lv3746cuSIe8w999yjJ598Ups2bVKdOnX0f//3f8rPz5ck3X777crNzdWnn36qb7/9Vo8//rgSEhIkSampqerRo4c6dOigzZs368MPP9Rvv/2m6667zmsO8+fPV0REhD7//HM9//zzGjJkiN555x1lZGS4xyxfvlyZmZm65pprJEkPPfSQ5s6dqzlz5mjbtm0aO3asbrzxRq1Zs0aStG/fPv3pT39S//79tXXrVt100026//77S3ROunXrplmzZikpKUmpqanav3+/7rjjDknHA5RHHnlEX3/9tZYuXardu3drxIgRPs/r9OnTtX37drVr107jxo3T559/rnfeeUcrV67U2rVr9eWXX3o9Z+TIkfr888+1aNEiffPNN7r22mt1+eWXa+fOnUXmlJqaqvHjx5fo9QSCNWcAAAAAACglVzhjlxRhWZK8K2eyWHMGAICwlJmZqTlz5mjevHnq16+fJOnFF1/UypUr9dJLL+ncc8+VJE2aNEm9e/eWdDxIadiwod566y1dd9112rt3r6655hq1bdtWktS0aVP3/ufMmaNOnTpp2rRp7m0vv/yyGjVqpB9++EHNmzeXJJ111lmaMWOGe0yzZs0UHx+vt956S0OHDpUkLVy4UAMHDlRSUpIyMzM1c+ZMffLJJ+ratav7uJ999pmef/559ejRQ3PmzFHTpk319NNPy7IstWjRwh0enU5UVJSqVasmy7JUr149OZ1OpaWlSZJGjRrlHte0aVP94x//0HnnnaeMjAx3KCVJU6dOdZ+z9PR0zZ8/XwsXLtSll14qSZo7d64aNGjgHr9r1y69/vrr+uWXX9zbx48frw8//FBz587VtGnTvOZU1ghnAAAAAAAoJVdbs1ibTdaJcIY1ZwAAwK5du5Sfn68LL7zQvS0yMlLnnXeetm/f7g5nXAGIJNWoUUMtWrTQ9u3bJUl33XWXbr31Vq1YsUKXXXaZrrnmGrVr106StGXLFq1atcortPA8tiuc6dKli9djkZGRuvbaa/Xaa69p6NChyszM1Ntvv62FCxdKOt4qLScnxx1+uOTl5aljx46SpO3bt+uCCy5w/+xT+HUE6quvvtLkyZO1detWHTlyRM4TP0ft3btXrVu3do/zfE0//fST8vPzdd5557m3VatWTS1atHDf//LLL2WMcZ8Tl9zcXNWsWbPU8/YX4QwAAAAAAKXkqpyJtU4GMpE2myIkFYg1ZwAACFfmxB9weAYYru2FtxXmevymm25S37599f7772vFihWaPn26nnrqKd15551yOp0aOHCgz2qV+vXru2/Hx8cXeXzIkCHq0aOHDh48qJUrVyomJsZd3eMKRN5//30lJyd7PS86OtrrtQVTZmam+vTpoz59+ujVV19V7dq1tXfvXvXt21d5eXleYz1f06nOs4vT6ZTdbteWLVtkt9u9xvkKt8oaa84AAAAAAFBK7nDG5v2BgKt6hsoZAADC01lnnaWoqCh99tln7m35+fnavHmzWrVq5d62YcMG9+0//vhDP/zwg1q2bOne1qhRI91yyy1asmSJ7r77br344ouSpE6dOmnbtm1KSUnRWWed5fXPVyDjqVu3bmrUqJEWL16s1157Tddee62ioqIkSa1bt1Z0dLT27t
1bZL+NGjVyj/Gcd+HXcTpRUVFyOBxe23bs2KFDhw7pscceU/fu3dWyZUsdPHjwtPtq1qyZIiMjtXHjRve2tLQ07dy5032/Y8eOcjgcOnjwYJHX5Gpj5mtOZYVwBgAAAACAUvJsa+bJdT+7DP6yFAAAhL74+Hjdeuutuueee/Thhx/qu+++080336ysrCyNHj3aPW7q1Kn6+OOP9b///U8jRoxQrVq1dNVVV0mSxowZo+XLl2v37t368ssv9cknn7iDndtvv11HjhzRDTfcoI0bN+qnn37SihUrNGrUqNOGDJZlafDgwfrXv/6llStX6sYbb3Q/lpiYqPHjx2vs2LGaP3++du3apa+++krPPvus5s+fL0m65ZZbtGvXLo0bN07ff/+9Fi5cqHnz5pX43KSkpCgjI0Mff/yxDh06pKysLJ155pmKiorSM888o59++knvvPOOHnnkkdPuKzExUcOHD9c999yjVatWadu2bRo1apRsHi1nmzdvriFDhmjYsGFasmSJdu/erU2bNunxxx/XsmXLip1TWaGtGQAAAAAApeAwRnmucMbyDmdclTO0NQMAoGy02rG9oqdwWo899picTqeGDh2q9PR0denSRcuXL9cZZ5zhNeZvf/ubdu7cqfbt2+udd95xV7E4HA7dfvvt+uWXX5SUlKTLL79cTz/9tCSpQYMG+vzzz3Xfffepb9++ys3NVePGjXX55ZfLZjt9bcaQIUM0bdo0NW7c2GtdHEl65JFHVKdOHU2fPl0//fSTqlevrk6dOunBBx+UJJ155pl68803NXbsWD333HM677zzNG3aNI0aNapE56Vbt2665ZZbdP311+vw4cO67777NG3aNM2bN08PPvig/vGPf6hTp0568skn9X//93+n3d/MmTN1yy236IorrlBSUpLuvfde7du3TzExMe4xc+fO1aOPPqq7775b+/fvV82aNdW1a1f179/f55wmTZqkyZMnl+j1+MsyZdEYLgykpaWpWrVqOnbsmJKSkkq9v/z8fC1btkz9+/dXZGRkEGYIAMHB9QlAqOL6BKCibG/Zyut+ptOhc0+0zOgaF6cXzjpbu6ZOUbOHJ+n673fou9xc2SV907yFVx/0yvBhEoCqhZ+fUFnl5ORo9+7datKkidcH7ag6nE6n0tLSlJSUVKJQqSQyMzOVnJysp556yqtKqbRO9X70JzegcgYAAAAAgFLIcp78m8cYm+/KGYekfGMUdZqFfwEAABCYr776Sjt27NB5552nY8eOaerUqZKkK6+8soJn5htrzgAAAAAAUAqeLcviLN9rzkhSFo0rAABAGOnXr58SEhJ8/ps2bVqZHPPJJ59U+/btddlllykzM1Nr165VrVq1yuRYpUXlDAAAAAAApZBjToYzsTbvyhjPNWiynE5Vt9vLbV4AAAAV6d///reys7N9PlajRo2gH69jx47asmVL0PdbVghnAAAAAAAohWyPtmaxxbQ1k46HMwAAAOEiOTm5oqcQ0mhrBgAAAABAKXiGLjFW4XDmZCVNNuEMAAClYmgRihAQrPch4QwAAAAAAKWQfYq2Zl6VM4ZwBgCAQERGRkqSsrKyKngmwMn3oet9GSjamgEAAAAAUAo5Hm3N4gpVzniuOePZ/gwAAJSc3W5X9erVdfDgQUlSXFycLMs6zbNQmTidTuXl5SknJ0c2W2jWlBhjlJWVpYMHD6p69eqyl3ItQcIZAAAAAABKwbNdGWvOAABQNurVqydJ7oAGVYsxRtnZ2YqNjQ354K169eru92NpEM4AAAAAAFAKnu3KYgq1NfMMa1hzBgCAwFmWpfr166tOnTrKz8+v6OkgyPLz8/Xpp5/q4osvLnW7sLIUGRlZ6ooZF8IZAAAAAABKwbNdWaxVuHLmZFjDmjMAAJSe3W4P2ofjCB12u10FBQWKiYkJ6XAmmEKzeRsAAAAAAJVEjkfoEmcrfs0Z2poBAADAhXAGAAAAAIBS8GxXFlOoR3qcV1szIwAAAE
AinAEAAAAAoFSyPNua2Qq3NaNyBgAAAEURzgAAAAAAUAqebc1OFc5ks+YMAAAATiCcAQAAAACgFDzbmsXZvNuaxXq0OaNyBgAAAC6EMwAAAAAAlILnWjIxlvev2bFea84QzgAAAOA4whkAAAAAAEohu4RtzTzXpgEAAEB4I5wBAAAAAKAUXBUxEZIiLe+2ZjGWJdcW1pwBAACAC+EMAAAAAACl4GprFmcr+iu2ZVmKPdHqLJO2ZgAAADiBcAYAAAAAgFJwVcTE+AhnJCnWdrx2hjVnAAAA4EI4AwAAAABAKbhCl9hCLc1cXBU1WYQzAAAAOIFwBgAAAACAUsg2x9uaxRZTOeMKZ1zjAAAAAMIZAAAAAAACVGCM8k3xa85IUtyJNWfyjFEBAQ0AAABEOAMAAAAAQMA815GJKaatmWvNmcLjAQAAEL4IZwAAAAAACFC2ORm2nK6tWeHxAAAACF+EMwAAAAAABCjbebJNWazl+1dsz9Amy0lbMwAAABDOAAAAAAAQMM82ZZ7tyzzFWZ7hDJUzAAAAIJwBAAAAACBgJWlr5l05QzgDAAAAwhkAAAAAAAJWsrZmJytqWHMGAAAAEuEMAAAAAAAB825r5vtX7DgqZwAAAFAI4QwAAAAAAAHybmt2+jVnPCttAAAAEL4IZwAAAAAACFBJ2ppROQMAAIDCCGcAAAAAAAhQjjl9WzOvNWcIZwAAACDCGQAAAAAAApblLEFbM8/KGUM4AwAAgBAIZ5577jk1adJEMTEx6ty5s9auXVvs2NTUVA0ePFgtWrSQzWbTmDFjiozp2bOnLMsq8m/AgAHuMZMnTy7yeL169cri5QEAAAAAqjDPtmYxxbQ182x3RlszAAAASBUczixevFhjxozRhAkT9NVXX6l79+7q16+f9u7d63N8bm6uateurQkTJqh9+/Y+xyxZskSpqanuf//73/9kt9t17bXXeo1r06aN17hvv/026K8PAAAAAFC1ZXtUwsQV09bMc7tnmAMAAIDwFVGRB585c6ZGjx6tm266SZI0a9YsLV++XHPmzNH06dOLjE9JSdHs2bMlSS+//LLPfdaoUcPr/qJFixQXF1cknImIiKBaBgAAAABQKjmebc0s323NPNeioXIGAAAAUgWGM3l5edqyZYvuv/9+r+19+vTRunXrgnacl156SX/+858VHx/vtX3nzp1q0KCBoqOjdf7552vatGlq2rRpsfvJzc1Vbm6u+35aWpokKT8/X/n5+aWep2sfwdgXAAQT1ycAoYrrE4CK4oiOdt/O9AhkomJi5IiOlvPE467/xniGM9bJ53P9AlDe+PkJQKiqKtcnf+ZfYeHMoUOH5HA4VLduXa/tdevW1YEDB4JyjI0bN+p///ufXnrpJa/t559/vhYsWKDmzZvrt99+06OPPqpu3bpp27Ztqlmzps99TZ8+XVOmTCmyfcWKFYqLiwvKfCVp5cqVQdsXAAQT1ycAoYrrE4ByN/Xk74aHpk2TNm6UJP12373K9+jmsHvCg5KkrKwsafBgSdLhJk2068TvlruWLSuvGQOAF35+AhCqKvv1KSsrq8RjK7StmSRZhcq+jTFFtgXqpZde0jnnnKPzzjvPa3u/fv3ct9u2bauuXbuqWbNmmj9/vsaNG+dzXw888IDXY2lpaWrUqJH69OmjpKSkUs81Pz9fK1euVO/evRUZGVnq/QFAsHB9AhCquD4BqCjfdznXfdv+0y737VZPzVSC3S5ndLR2T3hQTf4+TbbcXDmMxzozO3eq2cOTJEktNm8qtzkDgMTPTwBCV1W5Prk6bpVEhYUztWrVkt1uL1Ilc/DgwSLVNIHIysrSokWLNHXq1NOOjY+PV9u2bbVz585ix0RHRyvao3TdJTIyMqhvlmDvDwCChesTgFDF9QlAebN7tLzOKXC4b8fn58teUOC+b8vNlT03V3ZJ0ZalXGOUXeBwP59rF4CKws9PAEJVZb8++TN32+mHlI2oqCh17ty5SJnSyp
Ur1a1bt1Lv/z//+Y9yc3N14403nnZsbm6utm/frvr165f6uAAAAACA8JFlnJKkSMtSxCm6QMSeWHcm+8R4AAAAhLcKbWs2btw4DR06VF26dFHXrl31wgsvaO/evbrlllskHW8ltn//fi1YsMD9nK1bt0qSMjIy9Pvvv2vr1q2KiopS69atvfb90ksv6aqrrvK5hsz48eM1cOBAnXnmmTp48KAeffRRpaWlafjw4WX3YgEAAAAAVU6283jYEnua9txxlqWjkrKchDMAAACo4HDm+uuv1+HDhzV16lSlpqbqnHPO0bJly9S4cWNJUmpqqvbu3ev1nI4dO7pvb9myRQsXLlTjxo21Z88e9/YffvhBn332mVasWOHzuL/88otuuOEGHTp0SLVr19YFF1ygDRs2uI8LAAAAAEBJ5JxYTybOdurGFK7KGcIZAAAASBUczkjSbbfdpttuu83nY/PmzSuyzXgupFiM5s2bn3LcokWLSjw/AAAAAACK466cOU04E+dua2bkNEa201TaAAAAoGqrsDVnAAAAAACo7FzhTMzp2pp5hDc5JfijQwAAAFRthDMAAAAAAAQgzxgVnLh92soZ6+TjtDYDAAAA4QwAAAAAAAHI8QhZPMMXX2JtJytrsglnAAAAwh7hDAAAAAAAAfAMWTzDF18825plG8IZAACAcEc4AwAAAABAALI91o6JOU1bM8+2Z1lO1pwBAAAId4QzAAAAAAAEwKty5jRtzVhzBgAAAJ4IZwAAAAAACIA/bc28K2cIZwAAAMId4QwAAAAAAAHwbGsWe5q2ZnEe4Q1rzgAAAIBwBgAAAACAAPjT1ozKGQAAAHginAEAAAAAIACeFTCna2vmueZMttOcYiQAAADCAeEMAAAAAAAB8AxZTt/WjMoZAAAAnEQ4AwAAAABAAPxra8aaMwAAADiJcAYAAAAAgAD41daMyhkAAAB4IJwBAAAAACAAXm3NTlc547XmDOEMAABAuCOcAQAAAAAgAF5tzfxac8acYiQAAADCAeEMAAAAAAAB8KetmWd4k8WaMwAAAGGPcAYAAAAAgAB4Vc6cpq2ZZ+UMbc0AAABAOAMAAAAAQACyjceaM6dpaxZlWYo4cTuLcAYAACDsEc4AAAAAABAA7zVnTt3WTDpZPZPNmjMAAABhj3AGAAAAAIAAeIYzMadpayadrK5hzRkAAAAQzgAAAAAAEICcE23Noi1LdsufyhnCGQAAgHBHOAMAAAAAQABca8ecbr0Zl9gTAU6W0yljaG0GAAAQzghnAAAAAAAIgKsCJqYEVTPSyRDHISmfcAYAACCsEc4AAAAAABCA7BMBS1wJK2c8x2URzgAAAIQ1whkAAAAAAPxkjFGOq62ZVdK2Zh7hDOvOAAAAhDXCGQAAAAAA/JRvjBwnbsfaStbWzKtyhnAGAAAgrBHOAAAAAADgJ8+2ZDElbmt2MsTJJpwBAAAIa4QzAAAAAAD4yTNciSthWzPvNWcIZwAAAMIZ4QwAAAAAAH7K8QhXStrWzHPNmWynOcVIAAAAVHWEMwAAAAAA+MkzXIktcVszz3CGyhkAAIBwRjgDAAAAAICfsjzClZgStjXzDHGyCGcAAADCGuEMAAAAAAB+CqStWZzHONacAQAACG+EMwAAAAAA+MmzrVlcSStnLCpnAAAAcBzhDAAAAAAAfvJcMyYmoDVnzClGAgAAoKojnAEAAAAAwE9ZAbU1o3IGAAAAxxHOAAAAAADgpxyPypfYkrY18whxsllzBgAAIKwRzgAAAAAA4CfPtmZxJW1rxpozAAAAOIFwBgAAAAAAP3lWvsSUsK1ZrNeaM4QzAAAA4YxwBgAAAAAAP3lWvpS0rZn3mjPmFCMBAABQ1RHOAAAAAADgpxzjseZMCduaxViWXDU2rDkDAAAQ3ghnAAAAAADwk/eaMyVra2ZZlrvKhjVnAAAAwhvhDAAAAAAAfvIMZ2JK2NZMkmJPBDmEMwAAAOGNcAYAAAAAAD9lB9DWTD
q57ozn8wEAABB+CGcAAAAAAPCTd+VMydqaSSfDGSpnAAAAwhvhDAAAAAAAfsp2Hq98ibEs2fwJZ060QMszRgUFBWUyNwAAAIQ+whkAAAAAAPyUbY5XvvjT0uz4+JNBTmZmZlDnBAAAgMqDcAYAAAAAAD+52prF+lE1I51sayYRzgAAAIQzwhkAAAAAAPzkamvmf+XMyfEZGRlBnRMAAAAqD8IZAAAAAAD8YIwJvK2ZReUMAAAACGcAAAAAAPBLrjEyJ26Xpq0ZlTMAAADhi3AGAAAAAAA/5Bjjvu1/W7OTYQ6VMwAAAOGLcAYAAAAAAD9kOZ3u2/6GM1TOAAAAQCKcAQAAAADAL9me4Yy/bc1YcwYAAAAinAEAAAAAwC/ZJjiVM4QzAAAA4YtwBgAAAAAAP+Q4PdacsQJfc4a2ZgAAAOGLcAYAAAAAAD8Ea80ZKmcAAADCF+EMAAAAAAB+8GxrFmPzb80Zz0obKmcAAADCF+EMAAAAAAB+8GxrFudnWzMqZwAAACARzgAAAAAA4JfsUrQ18xxP5QwAAED4IpwBAAAAAMAPWaVoaxZP5QwAAABEOAMAAAAAgF+yPdqaxfrZ1izWOhnmUDkDAAAQvghnAAAAAADwQ45H5UxcKdqaUTkDAAAQvghnAAAAAADwg/eaM/61NbNblqJPVM9QOQMAABC+CGcAAAAAAPBDlkdbsxg/25pJJ6tnqJwBAAAIX4QzAAAAAAD4Idt4Vs74/2t1HJUzAAAAYY9wBgAAAAAAP+R4tDVzBS3+oHIGAAAAhDMAAAAAAPgh26OtWUCVMyeek5WVJadH0AMAAIDwQTgDAAAAAIAfXG3NLEnRAVTOuMIZY4yys7ODOTUAAABUEoQzAAAAAAD4IftEtUuMZckKJJyxTv4qTmszAACA8EQ4AwAAAACAH7LN8bZmcQG0NJOkWNvJQCcjIyMocwIAAEDlQjgDAAAAAIAf3JUzAYYznqEOlTMAAADhiXAGAAAAAAA/uMKZ2ABamklSrEc4Q+UMAABAeCKcAQAAAACghIwx7rZmsYFWzrDmDAAAQNgjnAEAAAAAoISys7PdtwNfc4bKGQAAgHBHOAMAAAAAQAl5VrrEBNzW7OTzqJwBAAAIT4QzAAAAAACUkGeYEnBbMypnAAAAwh7hDAAAAAAAJZSVleW+HWux5gwAAAACQzgDAAAAAEAJeVfOBNbWjMoZAAAAVHg489xzz6lJkyaKiYlR586dtXbt2mLHpqamavDgwWrRooVsNpvGjBlTZMy8efNkWVaRfzk5OQEfFwAAAAAAKThtzVhzBgAAABUazixevFhjxozRhAkT9NVXX6l79+7q16+f9u7d63N8bm6uateurQkTJqh9+/bF7jcpKUmpqale/2JiYgI+LgAAAAAAUqFwJtC2ZjbamgEAAIS7Cg1nZs6cqdGjR+umm25Sq1atNGvWLDVq1Ehz5szxOT4lJUWzZ8/WsGHDVK1atWL3a1mW6tWr5/WvNMcFAAAAAEAqtOZMgG3NPEMd2poBAACEp4iKOnBeXp62bNmi+++/32t7nz59tG7dulLtOyMjQ40bN5bD4VCHDh30yCOPqGPHjqU6bm5urnJzc93309LSJEn5+fnKz88v1Xxd+/H8LwCECq5PAEIV1yeg8nA4HPrss8+Umpqq+vXr66KLLpLdbq/oaQXE9bugJMVERckRHV1kjPPENqePxyQpxjoZ6qSnp3MdA1Bu+PkJQKiqKtcnf+ZfYeHMoUOH5HA4VLduXa/tdevW1YEDBwLeb8uWLTVv3jy1bdtWaWlpmj17ti688EJ9/fXXOvvsswM+7vTp0zVlypQi21esWKG4uLiA51vYypUrg7YvAAgmrk8AQhXXJyC0rV+/Xv/+9791+PBh97aaNWvqpptuUteuXStwZoHZuHGj+3baoEHa1atXsWN3T3jQ5/b09HRp6FBJ0p49e7Rs2bLgThIAToOfnwCEqsp+ffKssj6dCgtnXCzLuwzcGF
Nkmz8uuOACXXDBBe77F154oTp16qRnnnlG//jHPwI+7gMPPKBx48a576elpalRo0bq06ePkpKSAp6vS35+vlauXKnevXsrMjKy1PsDgGDh+gQgVHF9AkLfW2+9pRkzZsgY47X9yJEjmjFjhhYtWqSrr766gmYXmO3bt7tvN166VM1WrS4yxhkdrd0THlSTv0+TzaMDg0u+0+m+HRcXp/79+5fJXAGgMH5+AhCqqsr1ybPK+nQqLJypVauW7HZ7kWqVgwcPFqlqKQ2bzaZzzz1XO3fuLNVxo6OjFe2jJD0yMjKob5Zg7w8AgoXrE4BQxfUJCE0Oh0N33313kWBGOvnHcePHj9c111xTqVqc5eTkuG/HOxyy+whfXGy5uT4ft+v4L+MFkjIzM7mGASh3/PwEIFRV9uuTP3O3nX5I2YiKilLnzp2LlCmtXLlS3bp1C9pxjDHaunWr6tevX67HBQAAAIBwtnbtWv3yyy/FPm6M0b59+7R27dpynFXpZWZmum/HWoH/Sh1nsxXZHwAAAMJHhbY1GzdunIYOHaouXbqoa9eueuGFF7R3717dcsstko63Etu/f78WLFjgfs7WrVslSRkZGfr999+1detWRUVFqXXr1pKkKVOm6IILLtDZZ5+ttLQ0/eMf/9DWrVv17LPPlvi4AAAAAAD/bG/Zyuv+5hK2dNg8eIjqerSKbrVj+ylGVzyvcMYWeDgTa7MpzelURkZGMKYFAACASqZCw5nrr79ehw8f1tSpU5WamqpzzjlHy5YtU+PGjSVJqamp2rt3r9dzOnbs6L69ZcsWLVy4UI0bN9aePXskSUePHtVf/vIXHThwQNWqVVPHjh316aef6rzzzivxcQEAAAAApVM7omStyko6LlR4LvIaW4r1Ul1VN1TOAAAAhKcKDWck6bbbbtNtt93m87F58+YV2earX7Gnp59+Wk8//XSpjgsAAAAAKJ3OsXGqGxGhgwUF8vVbnCWpbkSEOsfGlffUSiVYlTNxtuPBTkZGhnsNHgAAAISPCltzBgAAAABQddktSw/WqevzMVcM8UCdurJXslAimG3NJMnhcCgvL6/U8wIAAEDlQjgDAAAAACgTvRMTNatBshIKhRh1IyI0q0GyeicmVtDMAucVzpQiWIrzOCesOwMAABB+CGcAAAAAAGWmd2Kirkmq5r5f227XyqbNKmUwI51cc8YmKSoIa85IrDsDAAAQjghnAAAAAABlKss43bfzpUrXysyTK0iJtdlKtU6MZ+UM4QwAAED4IZwBAAAAAJSpdOfJcCbD4ZAxpgJnUzquICWmlAFTnO3k82lrBgAAEH4IZwAAAAAAZSrDcTKcKZCUU4nDGVdbszhb6X6dpnIGAAAgvBHOAAAAAADKVLrT4XU/w6OSprJxtzWzSvfrtOfzqZwBAAAIP4QzAAAAAIAylV4ojEl3OIoZGdqcTqeys7MlSTG20rY1o3IGAAAgnBHOAAAAAADKVLqjUDhTSStnXC3NJCm2lG3NPJ9P5QwAAED4IZwBAAAAAJSpjCrS1swznIkrZVuzOI/KGypnAAAAwg/hDAAAAACgzOQbo2xjvLYVXoOmsvAMUWJL2daMNWcAAADCG+EMAAAAAKDMZPqokslwVM7KGc9wJiaIbc2onAEAAAg/hDMAAAAAgDKT7ihaJVMlKmdK2dYsnjVnAAAAwhrhDAAAAACgzKT7qJzxta0y8FxzptRtzVhzBgAAIKwRzgAAAAAAykyGjyqZ9CrQ1iyulG3N4izamgEAAIQzwhkAAAAAQJnxFcT4CmwqA681Z0rZ1iyWtmYAAABhjXAGAAAAAFBmMqpQWzOvNWdK2dbMs/KGyhkAAIDwQzgDAAAAACgz6T6qZDIqaVszrzVnSlk5E2NZsqzjAQ+VMwAAAOGHcAYAAAAAUGZ8V85U/rZmsaVcc8ayLMXHxxfZLwAAAMID4QwAAAAAoMz4XnOmclbOBLOtmSR3OEPlDAAAQPghnAEAAAAAlJkMH1
UylXXNmWC2NZOkhIQESVTOAAAAhCPCGQAAAABAmfEMYpJOtALLcDhkjKmoKQUsmG3NJCpnAAAAwhnhDAAAAACgzGR4tDWrHxkpSSqQlFPpw5nStzVzVc7k5uaqoKCg1PsDAABA5UE4AwAAAAAoM+kebc3qR0S4b1fGdWe8wpkgtDVzVc4U3jcAAACqPsIZAAAAAECZcbU1i7UsVbPbT253FF2LJtR5rTkTxLZmEuEMAABAuCGcAQAAAACUGVeFTKLdrkSbRzhT2StnghDOuNqaSaw7AwAAEG4IZwAAAAAAZcZVIZNgsynRfvJXUM92Z5WFK5yxS4oMwv6onAEAAAhfhDMAAAAAgDJRYIyyjZEkJdpsSvCoNkl3VN7KmVibTZZllXp/npUzhDMAAADhhXAGAAAAAFAmMjxalyXavNuaZVTCtmauNWdireD8Ku1ZOUNbMwAAgPBCOAMAAAAAKBMZjpOtyxLsNiVUkbZmsbbSV81IVM4AAACEM8IZAAAAAECZSPeqnLFV+soZz7ZmwUDlDAAAQPginAEAAAAAlAnPACbBZq/Ua844HA7l5uZKCl5bMypnAAAAwhfhDAAAAACgTHi2Lku025ToEc5kVLK2Zq71ZqTgtTWjcgYAACB8Ec4AAAAAAMpEhsOzcsamRPvJtmbplaytmWdlS0yQ2ppROQMAABC+CGcAAAAAAGXCe80Z77ZmGZWsrZlneBIXpLZmVM4AAACEL8IZAAAAAECZ8GxrlmCzKcayFOHjscqgLNqaUTkDAAAQvghnAAAAAABlwrM6Jslul2VZSjjR2iyjErc1iw1SWzMqZwAAAMIX4QwAAAAAoEx4tjVztTRLPPHfNEflqpzxWnOmDNqaUTkDAAAQXghnAAAAAABlIsOjdVlioXAmw+mUMaZC5hUI78qZ4Lc1o3IGAAAgvBDOAAAAAADKhFflzIl2Zokn/uuQlF2JwhnPNWfiqJwBAABAKRHOAAAAAADKhOeaM662Zgke67V4VtaEurJYc8ZutysmJqbI/gEAAFD1Ec4AAAAAAMpE+onwJdayFGEdbwWW6BFspHuEN6HOa82ZILU1k05Wz9DWDAAAILwQzgAAAAAAykTGibZmrlZmkpRgsxd5vDLwqpwJUlsz6eS6M1TOAAAAhBfCGQAAAABAmXBVxni2Mku0e1TOVKK2Zl5rzgSprZlE5QwAAEC4IpwBAAAAAASdwxhlmROVMx5hRkJVaGtmBa+tmatyJisrS85KVEkEAACA0iGcAQAAAAAEnWfLskSPVmaJVaGtWRlUzhhjlJ2dHbT9AgAAILQRzgAAAAAAgi7dcbJlWYJHK7OEKtDWLJjhjKtyRmLdGQAAgHBCOAMAAAAACLp0r8oZjzVnqkDlTJwteG3NXJUzEuvOAAAAhBPCGQAAAABA0HkGLwkegUzVWHOGyhkAAACUDuEMAAAAACDoPFuWJdo9K2cqZ1uzsl5zRqJyBgAAIJwQzgAAAAAAgi7D4Vk54xHO2E9W0aRXorZmrjVnIiMjFWmVTVszKmcAAADCB+EMAAAAACDovNecsXvcPvlraIaj8lXOeIYpweDZ1ozKGQAAgPBBOAMAAAAACDrPlmWelTPRlqUI95jKUznjCmfi4uKCul8qZwAAAMIT4QwAAAAAIOg825p5rjljWZa7tVlGJQxnyrJyhnAGAAAgfBDOAAAAAACCzrMqJsGjrdnx+8d/FU2vRG3NXGvOBDuc8dwfbc0AAADCB+EMAAAAACDoMjzamnmuM+N5P8PplDGmXOcViIKCAuXl5UmicgYAAADBQTgDAAAAAAg6z8oZVxuzwvcdkrIrQTjjGZqU5ZozVM4AAACED8IZAAAAAEDQea45E1+ocibB475nhU2ocrU0k6icAQAAQHAQzgAAAAAAgi79ROgSa1mKtCyvxzzbnKV7hDihyjM0Yc0ZAAAABAPhDAAAAAAg6DJOtDVLsNmLPOa5LcNZucKZYLc1o3IGAAAgPBHOAAAAAA
CCzlURk2gv+mun57b0StDWjMoZAAAABBvhDAAAAAAgqBzGKMu4KmeK/tqZUMnamrHmDAAAAIKNcAYAAAAAEFSercoSfbQ1S/LYll7J2poFO5yJiopSRESEJCpnAAAAwgnhDAAAAAAgqDI8WpUl+GhrllCJ25oFe80Z6WTgQ+UMAABA+CCcAQAAAAAElWerskQfbc08q2kyKkFbs7KsnJFOtjajcgYAACB8EM4AAAAAAILKs61Zgo+2Zl5rzlSCypmyXHPGc59UzgAAAIQPwhkAAAAAQFB5Bi4+K2c82pplhPmaM9LJypnMzEwZY4K+fwAAAIQewhkAAAAAQFB5tjXzueaMRzVNeiULZ8pyzZmCggLl5eUFff8AAAAIPYQzAAAAAICg8qyGSfTR1syzmibDQVszV+WMRGszAACAcEE4AwAAAAAIKs+2Zgk+2ppFW5Yi3GMrV+VMWa45I0kZGRlB3z8AAABCD+EMAAAAACCovCpnfLQ1syxLiXZ7kbGhqqzbmlE5AwAAEH4IZwAAAAAAQeW55oyvtmbSyYqa9ErQ1ozKGQAAAAQb4QwAAAAAIKgyTtPWTDq57kyG0yljTLnMK1CsOQMAAIBgI5wBAAAAAASV5zoyicWFMyfamjkkZYd4OFPWbc2onAEAAAg/hDMAAAAAgKDK8GhrlmD33dbMM7QJ9dZmrnAmKipKERERQd8/lTMAAADhh3AGAAAAABBU6SfamsValiIty+eYBI+1aDwrbUKRKzApi5ZmhfdL5QwAAEB4IJwBAAAAAARVxomwxTOAKSzRfvLX0YwQD2dca86URzhD5QwAAEB4IJwBAAAAAASVqxLGM4ApLMGzrZmzcrQ1K6twxrOtGZUzAAAA4aHCw5nnnntOTZo0UUxMjDp37qy1a9cWOzY1NVWDBw9WixYtZLPZNGbMmCJjXnzxRXXv3l1nnHGGzjjjDF122WXauHGj15jJkyfLsiyvf/Xq1Qv2SwMAAACAsOMwRpnuypnif+VM9Kiq8VyjJhS5wpm4uLgy2T+VMwAAAOGnQsOZxYsXa8yYMZowYYK++uorde/eXf369dPevXt9js/NzVXt2rU1YcIEtW/f3ueY1atX64YbbtCqVau0fv16nXnmmerTp4/279/vNa5NmzZKTU11//v222+D/voAAAAAINxkerQoSzxVWzOvypnQDWfy8vJUUFAgqXwqZwhnAAAAwkOFhjMzZ87U6NGjddNNN6lVq1aaNWuWGjVqpDlz5vgcn5KSotmzZ2vYsGGqVq2azzGvvfaabrvtNnXo0EEtW7bUiy++KKfTqY8//thrXEREhOrVq+f+V7t27aC/PgAAAAAIN55BS8Kp2prZK0dbM9d6M1L5rDlDWzMAAIDwEFFRB87Ly9OWLVt0//33e23v06eP1q1bF7TjZGVlKT8/XzVq1PDavnPnTjVo0EDR0dE6//zzNW3aNDVt2rTY/eTm5io3N9d9Py0tTZKUn5+v/Pz8Us/TtY9g7AsAgonrE4BQxfUJCC2O6GhJ0jHPcCYy0r29sLi8GPftNMtyjwu17+mjR4+6b8fGxio/P7/Y1+TiPPG48zTjXK812mNcenp6yJ0DAFUHPz8BCFVV5frkz/wrLJw5dOiQHA6H6tat67W9bt26OnDgQNCOc//99ys5OVmXXXaZe9v555+vBQsWqHnz5vrtt9/06KOPqlu3btq2bZtq1qzpcz/Tp0/XlClTimxfsWJFUPsOr1y5Mmj7AoBg4voEIFRxfQJCxNTjvy99v22bNGGCJKmge3ftGjnS5/BjP/wg3XuvJGl/ly7a9Ze/SJJ2LVtWDpMtOc8W2X/88YeWLVvmfq2ns3vCg6d83PVaPQOgn3766fgxAKAM8fMTgFBV2a9PnlXXp1Nh4YyLZVle940xRbYFasaMGXr99de1evVqxcSc/Kusfv36uW+3bdtWXbt2VbNmzTR//nyNGzfO574eeOABr8fS0tLUqFEj9enTR0lJSaWea3
5+vlauXKnevXsrMjKy1PsDgGDh+gQgVHF9AkLL913OlST9cqLLgCQ13PCFmu3e43O8LSfHfdu+br2a/XI8BGmxeVPZTTIAW7dudd9u3ry5+vfv736txXFGR2v3hAfV5O/TZPPowFCY67V6/hKfkJCg/v37l27SAFAMfn4CEKqqyvUpzeNn4dOpsHCmVq1astvtRapkDh48WKSaJhBPPvmkpk2bpo8++kjt2rU75dj4+Hi1bdtWO3fuLHZMdHS0V6m5S2RkZFDfLMHeHwAEC9cnAKGK6xMQGuwnQogMj9AlyTjd2wur5ji5zkxGQb57XKh9P+fl5blvJyYmKjIystjXVJgtN/eUY12vNSkpSZZlyRijzMzMkDsHAKoefn4CEKoq+/XJn7kXvzpjGYuKilLnzp2LlCmtXLlS3bp1K9W+n3jiCT3yyCP68MMP1aVLl9OOz83N1fbt21W/fv1SHRcAAAAAwl2Gx5oziTZ7seMSbSd/Hc3wCGpCTWZmpvt2MFtae7IsS/Hx8UWOBwAAgKqrQtuajRs3TkOHDlWXLl3UtWtXvfDCC9q7d69uueUWScdbie3fv18LFixwP8dVUp6RkaHff/9dW7duVVRUlFq3bi3peCuziRMnauHChUpJSXFX5iQkJCghIUGSNH78eA0cOFBnnnmmDh48qEcffVRpaWkaPnx4Ob56AAAAAKh60p0ng5YEW/F/DxhtWYq0LOUbozSPQCfUeIYlrgClLMTHxysjI0MZGRlldgwAAACEjgoNZ66//nodPnxYU6dOVWpqqs455xwtW7ZMjRs3liSlpqZq7969Xs/p2LGj+/aWLVu0cOFCNW7cWHv27JEkPffcc8rLy9OgQYO8njdp0iRNnjxZkvTLL7/ohhtu0KFDh1S7dm1dcMEF2rBhg/u4AAAAAIDAeFXO2IsPZyzLUqLNpiMOh9dzQo3nejBlGc4kJCTot99+o3IGAAAgTFRoOCNJt912m2677Tafj82bN6/INmPMKffnCmlOZdGiRSWZGgAAAADAT+mOkrU1k45X1hxxOJReSdqalXXljCQqZwAAAMJEha05AwAAAACoejJK2NZMOhneZDqdp/1DvIpSHmvOSCfDmdzcXDlCOKwCAABAcBDOAAAAAACCJt2zrdlpwpmEE23PHJKyKkE4U9ZtzXwdEwAAAFUT4QwAAAAAIGgyPNqaJdhP3dbMM7zJCNFqkfJac8Zz37Q2AwAAqPoIZwAAAAAAQZN+oq1ZjGUp0rJOOTbBY00az4qbUELlDAAAAMoC4QwAAAAAIGgyToQsp1tvRpIS7R6VM5UgnCmPNWckKmcAAADCAeEMAAAAACBoXBUwiadpaSZ5BziuiptQU15tzaicAQAACC+EMwAAAACAoHAYo0x/Kmc82pp5rlUTSsqrrRmVMwAAAOGFcAYAAAAAEBSZHq3JEksUznhWzoR+OFOWbc2onAEAAAgvhDMAAAAAgKDwDFgSbCVoa2YP/bZmrDkDAACAskA4AwAAAAAIigzHyYAl0e5fW7P0EG1r5lpzJiYmRvYSrKMTKCpnAAAAwgvhDAAAAAAgKLwrZ/xra5YR4pUzZbneTOH9UzkDAABQ9RHOAAAAAACCIsNrzZnTV5kkelSihPqaM2XZ0kyicgYAACDcEM4AAAAAAILCc92YkrQ186yuyQjRtmZUzgAAAKAsEM4AAAAAAILCM2Dxt61Zegi2NTPGuNecKc9whsoZAACAqo9wBgAAAAAQFOl+tjWLttkUaVlFnhsq8vLy5HAcD43KOpyhrRkAAEB4IZwBAAAAAARFhkf1S0kqZ6ST1TMZIRjOeIYkZb3mDG3NAAAAwgvhDAAAAAAgKNI92pol2k9fOSOdDHHSHaHX1swznKFyBgAAAMEUcDjzyiuv6MILL1SDBg30888/S5JmzZqlt99+O2iTAwAAAABUHp7VLyWvnDke4mQ6nTLGlMm8AuVab0Yq3zVnqJwBAACo+gIKZ+bMmaNx48apf/
/+Onr0qLsHb/Xq1TVr1qxgzg8AAAAAUEmke7Q1SyxhOJNgPz7OISkrxMKZ8qycsdvtiomJKXJcAAAAVE0BhTPPPPOMXnzxRU2YMEF2j1L1Ll266Ntvvw3a5AAAAAAAlYdX5UwJ25p5hjgZIdbarDzXnJFOBkBUzgAAAFR9AYUzu3fvVseOHYtsj46O5i98AAAAACBMudacibYsRVlWiZ6TYDsZ4qR7hDuhoDzbmkkn153h92oAAICqL6BwpkmTJtq6dWuR7R988IFat25d2jkBAAAAACohV1uzkq43I0mJ9pNjPduihYLybGvmeQwqZwAAAKq+iECedM899+j2229XTk6OjDHauHGjXn/9dU2fPl3//ve/gz1HAAAAAEAl4GprlmgrWUuz42M9whlHaFXOlHdbM1flTFZWlpxOp2x+hFwAAACoXAIKZ0aOHKmCggLde++9ysrK0uDBg5WcnKzZs2frz3/+c7DnCAAAAAAIcU5jlOkKZ+x+VM54BDkZIdbWrKIqZ4wxys7OLpdjAgAAoGIEFM5I0s0336ybb75Zhw4dktPpVJ06dYI5LwAAAABAJZLpdMqcuJ3oR8VHQgi3NauoNWek48EQ4QwAAEDVFVCN9O7du7Vz505JUq1atdzBzM6dO7Vnz56gTQ4AAAAAUDmke1S9JATY1iwjhNualWfljMS6MwAAAFVdQOHMiBEjtG7duiLbv/jiC40YMaK0cwIAAAAAVDIZHlUv/rQ18wxy0kO4rVl5rDnjGc54HhsAAABVT0DhzFdffaULL7ywyPYLLrhAW7duLe2cAAAAAACVTLrDs3LGnzVnPCpnQqytWXlXzhRuawYAAICqK6BwxrIspaenF9l+7NgxORyh9cM0AAAAAKDsZXhUvST609bMHrqVM+W95gxtzQAAAMJHQOFM9+7dNX36dK8gxuFwaPr06brooouCNjkAAAAAQOWQHnBbM9accaFyBgAAIHxEBPKkGTNm6OKLL1aLFi3UvXt3SdLatWuVlpamTz75JKgTBAAAAACEvowgtDVLD+G2ZuW95gyVMwAAAFVbQJUzrVu31jfffKPrrrtOBw8eVHp6uoYNG6YdO3bonHPOCfYcAQAAAAAhLj3AtmbRNpsiLavIPkJBebc1o3IGAAAgfARUOSNJDRo00LRp04I5FwAAAABAJeVZ9eJP5Yx0vHrmiMPhtW5NKPAMSGJjY8v8eFTOAAAAhI+Aw5mjR49q48aNOnjwoJyFfoAeNmxYqScGAAAAAKg80j3amiXaS145Ix0Pc444HEp3hGZbs9jYWNn8DJwCQeUMAABA+AgonHn33Xc1ZMgQZWZmKjExUdaJEnRJsiyLcAYAAAAAwoxn1Yu/lTNJdruUn68Mp1PGGK/fMSuSKyApj5ZmhY9D5QwAAEDVFtCf/tx9990aNWqU0tPTdfToUf3xxx/uf0eOHAn2HAEAAAAAIc6zrVmin+GMK8xxKrRCCdeaM+UVzlA5AwAAED4CCmf279+vu+66S3FxccGeDwAAAACgEvKqnPGzrZlnmHPs2LGgzam0qJwBAABAWQkonOnbt682b94c7LkAAAAAACop15oz0ZalKD/bkiXYToY5oRLOGGPc4Ux5/WEilTMAAADhI6A1ZwYMGKB77rlH3333ndq2bavIyEivx//v//4vKJMDAAAAAFQOrrZm/q43I0mJ9tCrnMnJyZExRhKVMwAAAAi+gMKZm2++WZI0derUIo9ZliWHw1FkOwAAAACg6nK1NUu0+dfSTPIOdEIlnHGtNyNVTDhD5QwAAEDVFlA44/ToJQwAAAAACG9Op1OZrnDGHkDlTAi2NfMMR8ornImKilJERIQKCgoIZwAAAKq4gNacAQAAAADAJT09XebE7UDamiWEYFszz3CkvNacsSzLHQTR1gwAAKBqC6hyRjr+g+qaNWu0d+9e5eXleT121113lXpiAAAAAIDKwTNQCaStWSJtzdwSEhJ07NgxKmcAAACquIDCma
+++kr9+/dXVlaWMjMzVaNGDR06dEhxcXGqU6cO4QwAAAAAhBHPQCUhgLZmCbQ1K3IsKmcAAACqtoDamo0dO1YDBw7UkSNHFBsbqw0bNujnn39W586d9eSTTwZ7jgAAAACAEOZdORPImjOhVzlTUeFMQkKC+/jGmNOMBgAAQGUVUDizdetW3X333bLb7bLb7crNzVWjRo00Y8YMPfjgg8GeIwAAAAAghHlVzgTS1sx+8jlHjx4NxpRKrSLWnJFOBkEFBQVFWogDAACg6ggonImMjJRlWZKkunXrau/evZKkatWquW8DAAAAAMJDVaycqcg1Z1xYdwYAAKDqCmjNmY4dO2rz5s1q3ry5evXqpYcffliHDh3SK6+8orZt2wZ7jgAAAACAEFbaNWdCMZyp6DVnpOPrztSoUaPcjg0AAIDyE1DlzLRp01S/fn1J0iOPPKKaNWvq1ltv1cGDB/X8888HdYIAAAAAgNDmXTnjf1uzKJtNUSe6M4RiOFOebc2onAEAAAgPAVXOdOnSxX27du3aWrZsWdAmBAAAAACoXLzXnAnobwCVaLPpsMMRkuFMRVbOAAAAoGoK6KfmSy65xOcijWlpabrkkktKOycAAAAAQCXiVTkTQFsz6WSoEyrhDGvOAAAAoCwF9FPz6tWrlZeXV2R7Tk6O1q5dW+pJAQAAAAAqD+/KGf/bmklSov3489LS0mSMCcq8SoPKGQAAAJQlv9qaffPNN+7b3333nQ4cOOC+73A49OGHHyo5OTl4swMAAAAAhDzvNWdKVznjdDqVkZGhxMTEoMwtUBW15oxnOEPlDAAAQNXlVzjToUMHWZYly7J8ti+LjY3VM888E7TJAQAAAABCXzDCGc/nHTt2LKTCGdqaAQAAINj8Cmd2794tY4yaNm2qjRs3qnbt2u7HoqKiVKdOHdntgZWwAwAAAAAqJ1c4E2VZigq4cubk75LHjh1Tw4YNgzK3QFXUmjO0NQMAAAgPfoUzjRs3Vn5+voYNG6YaNWqocePGZTUvAAAAAEAl4QpnAq2akaREu3flTEWjcgYAAABlye+fnCMjI/X222+XxVwAAAAAAJWQK0xJKEU4k2ALzXDGsizFxMSU23GpnAEAAAgPAf3kfNVVV2np0qVBngoAAAAAoLJxOp1KS0uTJCWWos11okdbs6NHj5Z2WqXmamsWFxcny7LK7bhUzgAAAIQHv9qauZx11ll65JFHtG7dOnXu3LlIifddd90VlMkBAAAAAEJbRkaGjDGSSlc5E6ptzcqzpVnh41E5AwAAUHUFFM78+9//VvXq1bVlyxZt2bLF6zHLsghnAAAAACBMeAYppVpzxqNyJpTCmbi4uHI9LpUzAAAA4SGgcGb37t3BngcAAAAAoBLyDFISbIG3NQvVNWeonAEAAEBZCPzPmk4wxrhL2AEAAAAA4cUzSEmyV422ZsYY95oz5R3OUDkDAAAQHgL+yXnBggVq27atYmNjFRsbq3bt2umVV14J5twAAAAAACEueJUzodPWLDs72327vMOZ2NhYWZYlicoZAACAqiygtmYzZ87UxIkTdccdd+jCCy+UMUaff/65brnlFh06dEhjx44N9jwBAAAAACEoeGvOhE7ljGfFSnmvOWNZluLj45WRkUHlDAAAQBUWUDjzzDPPaM6cORo2bJh725VXXqk2bdpo8uTJhDMAAAAAECa8KmdK09YsRMOZ8q6ccR0zIyODyhkAAIAqLKCfnFNTU9WtW7ci27t166bU1NRSTwoAAAAAUDl4V84E3tYsymZT1Il2XhUdzrjWm5EqLpyRWHMGAACgKgsonDnrrLP0n//8p8j2/2fvvsOjqtM2jt9T0is19I6KoIjBLiKLIEVFIYDAWl5FF7GA6CqgrsruiooFewMbShNiQRCIS4uKKN2CjSLFhBII6clkMu8fYYYZ0ibJJNO+n+vK5ZkzZ875DZLDmbnP8/wWLFigzp0713pQAAAAAAD/4DrnTM0rZ6ST1TPeDm
e8XTkTHR1dZhwAAAAILDVqa/b4449r5MiRWrdunS655BIZDAZ99dVX+t///lduaAMAAAAACEwulTO1aGsmlYY7GVarT4Uz9T3njHQyECooKJDVapXJVPOKJAAAAPimGl05Dxs2TBs2bFDjxo31ySefKDk5WY0bN9Z3332n6667ztNjBAAAAAD4KNfKmdqFCDEnQoisrCyVlJTUal+14e22ZvbKGYnqGQAAgEBVo8oZSUpMTNQHH3zgybEAAAAAAPyM65wzta+ckaSSkhLl5OQoNja2VvurKW+3NXM+pjf/HAAAAFB3ahzOWK1Wffzxx9qxY4cMBoO6dOmiIUOGyGyu8S4BAAAAAH7Gk+FMrFPlzfHjx4M2nKFyBgAAIPDVKEn58ccfNWTIEKWnp+v000+XJP32229q0qSJPvvsM5111lkeHSQAAAAAwDdlZmZKkkINBoXWtnLGac6a48ePq3Xr1rXaX035ypwzUmnlDAAAAAJPja6cx44dq65du2r//v3avHmzNm/erH379unss8/W7bff7ukxAgAAAAB8lL1yprZVM6fuw7kip74x5wwAAADqWo0qZ7Zt26aNGzeqQYMGjnUNGjTQf//7X5133nkeGxwAAAAAwLfZQ5RoD4Qz0ae0NfMWb7c1o3IGAAAg8NXo6vn000/XwYMHy6w/dOiQOnXqVOtBAQAAAAB8X0lJibKysiRJMSZTFVtXzVcqZ7zd1ozKGQAAgMBXo3DmiSee0D333KNFixZp//792r9/vxYtWqSJEyfqqaeeUlZWluMHAAAAABCYcnJyZLPZJHmocsbke+EMlTMAAACoCzVqa3bVVVdJkkaMGCGDwSBJjgvyq6++2vHYYDDIarV6YpwAAAAAAB/jHKB4Zs4Z32hrxpwzAAAAqGs1CmdWr17t6XEAAAAAAPyMc4DiPF9MTUX7YFszb1fOEM4AAAAEphrd2tS7d2+3f6ry6quvqn379goPD1diYqJSU1Mr3DYtLU2jR4/W6aefLqPRqIkTJ5a73eLFi3XmmWcqLCxMZ555pj7++ONaHRcAAAAAUJZL5YzJA5UzPtjWzBtzztDWDAAAIPDV+Oq5oKBA3333nT7//HN99tlnLj/uWrBggSZOnKiHHnpIW7ZsUa9evTRw4EDt3bu33O0LCwvVpEkTPfTQQ+revXu526xfv14jR47UDTfcoG3btumGG27QiBEjtGHDhhofFwAAAABQlmvljAfmnPGRtmberpyhrRkAAEDgq1Fbs+XLl+vGG2/UkSNHyjxXnXlmnnvuOd16660aO3asJGnmzJlasWKFXnvtNU2fPr3M9u3atdMLL7wgSXr77bfL3efMmTPVr18/TZkyRZI0ZcoUrV27VjNnztS8efNqdFwAAAAAQFmuc87Uvq1ZjI+0NbPPOWM0GhUWFlbvx6dyBgAAIPDVKJy56667NHz4cP3rX/9SQkJCjQ5cVFSkTZs2afLkyS7r+/fvr2+++aZG+5RKK2fuvfdel3VXXnmlZs6cWavjFhYWqrCw0PE4KytLkmSxWGSxWGo8Xjv7PjyxLwDwJM5PAHwV5yfA+44ePepYjgoLlbWWQUZkSIhj+dixY177/bYHIlFRUSouLi7zfFXvs+TE8yVVbFfR+3MOhLKzsznPAfAYrp8A+KpAOT9VZ/w1CmcOHTqkSZMm1TiYkaQjR47IarWW2UdCQoLS09NrvN/09PRK91nT406fPl2PP/54mfUrV670aA/ilJQUj+0LADyJ8xMAX8X5CfCe7777zrGcM3q0dl58ca33GTpihIqKirR//34tW7as1vuriYyMDEmSyWQqfwzTyn42LM/uh6ZW+vzOCt6fc+i1c+dOr/05AAhcXD8B8FX+fn6yV2C7o0bhTFJSktasWaOOHTvW5OUuDAaDy2ObzVZmXV3ss7rHnTJliiZNmuR4nJWVpdatW6t///6KjY2t1Xil0kQtJSVF/fr1U4jT3WIA4G2cnwD4Ks5PgPc5dx/o9N
EidVy+otb7bNCggQ4ePCibzaZBgwbVen81YbPZJEkNGzYsdwy/9jyv0teXhIVp90NT1f6/T8jo1IHhVKdv/L7c9fZODZIUExPjtT8HAIGH6ycAvipQzk/O13FVqVE48/LLL2v48OFKTU3VWWedVeYP65577qlyH40bN5bJZCpTrXLo0KFaVeQ0a9as0n3W9LhhYWHl9hoOCQnx6F8WT+8PADyF8xMAX8X5CfAe5/lQ4qxWmSoJItwVFxengwcP6vjx41773c7NzZVU2tasvDG4+z6NhYWVblvR+4uPj3cZC+c4AJ7G9RMAX+Xv56fqjL1G4czcuXO1YsUKRUREaM2aNS4VJwaDwa1wJjQ0VImJiUpJSdF1113nWJ+SkqIhQ4bUZFiSpIsuukgpKSku886sXLlSF58or6+r4wIAAABAsDl+/LhjOcZk8sg+4+LiJJXedVhSUiKj0eiR/bqrpKRE+fn5kuTRFtbVYTKZFB4eroKCAkdQBAAAgMBSo3Dm4Ycf1rRp0zR58uRaXShPmjRJN9xwg3r27KmLLrpIb775pvbu3atx48ZJKm0lduDAAb3//vuO12zdulVS6R1ahw8f1tatWxUaGqozzzxTkjRhwgRddtlleuqppzRkyBB9+umn+vLLL/XVV1+5fVwAAAAAQNWcw5loD4Uo9nDGZrMpJyfHI22kq8O5T3hUVFS9HttZVFSUCgoKXKqTAAAAEDhqFM4UFRVp5MiRtb6DaeTIkcrIyNC0adOUlpambt26admyZWrbtq0kKS0tTXv37nV5TY8ePRzLmzZt0ty5c9W2bVvt2bNHknTxxRdr/vz5evjhh/XII4+oY8eOWrBggS644AK3jwsAAAAAqJpL5YyHwxn7/oM1nImOjlZGRgaVMwAAAAGqRuHMTTfdpAULFmjq1Km1HsD48eM1fvz4cp979913y6yzT8xYmaSkJCUlJdX4uAAAAACAqtnDmdDQUIXVUTjTunVrj+zXXc5hiLcrZyRROQMAABCgahTOWK1WPf3001qxYoXOPvvsMpPcPPfccx4ZHAAAAADAd9nDGedApbZODWfqm3M44605Z6TSyhmptJLHZrO5zPUKAAAA/1ejcOaHH35wtBf78ccfPTogAAAAAIB/CPRwxhcqZ2w2m/Lz870aFAEAAMDzahTOrF692tPjAAAAAAD4EZvNpqysLEknApUcz8yN4u1wxlfmnHE+dk5ODuEMAABAgKlWODN06NAqtzEYDFq8eHGNBwQAAAAA8H05OTkqKSmRFFjhjK9UztjbmkmuYwIAAEBgqFY448lSdQAAAACA/3IOTuLi4qQDf3lkv86fOzMzMz2yz+rwlTlnTq2cAQAAQGCpVjjzzjvv1NU4AAAAAAB+pEw44yHx8fHlHqO++EpbMypnAAAAApvR2wMAAAAAAPifugpnaGtW9thUzgAAAAQewhkAAAAAQLUFQzjjzbZmVM4AAAAENsIZAAAAAEC1BUM4Q+UMAAAA6grhDAAAAACg2gI1nGHOGQAAANQHwhkAAAAAQLXVVTgTGhqq8PDwMseoL1TOAAAAoD4QzgAAAAAAqs05OImPj/fovu1hj7fDGeacAQAAQF0hnAEAAAAAVFtdVc4478/b4QyVMwAAAKgrhDMAAAAAgGqrj3AmKytLJSUlHt13VZhzBgAAAPWBcAYAAAAAUG31Ec7YbLZ6rxrxxcoZwhkAAIDAQzgDAAAAAKi2+ghnTj1OfbAHISaTSSEhIfV6bGe0NQMAAAhshDMAAAAAgGqzhyahoaEKDw/36L59IZyJioqSwWCo12M7o60ZAABAYCOcAQAAAABUmz008XTVzKn7zMzM9Pj+K2Ofc8abLc1OPT6VMwAAAIGHcAYAAAAAUG11Gc7Ex8eXOU59ca6c8abQ0FCZzWZJVM4AAAAEIsIZAAAAAEC12Gy2equc8VY4ExkZWa/HPZXBYHAERFTOAAAABB7CGQAAAABAteTk5KikpERSYIUzVqtVhY
WFkrxfOSOdnHeGyhkAAIDAQzgDAAAAAKgW58AkkMIZ+3wzkm+EM1TOAAAABC7CGQAAAABAtQRqOONcoeLttmaSa+WMzWbz8mgAAADgSYQzAAAAAIBqCYZwxpcqZ4qLi1VUVOTl0QAAAMCTCGcAAAAAANUSqOGMr7U1s1fOSMw7AwAAEGgIZwAAAAAA1RKo4YyvVs5IzDsDAAAQaAhnAAAAAADVEgzhjC/NOSNROQMAABBoCGcAAAAAANVS1+FMaGiowsPDyxyrrvly5QzhDAAAQGAhnAEAAAAAVEtdhzPO+w3mOWdoawYAABC4CGcAAAAAANVSn+FMZmZmney/PL5WOUNbMwAAgMBFOAMAAAAAqJb6CGfi4+MlSdnZ2SopKamTY5zK1+acoXIGAAAgcBHOAAAAAACqpT4rZ2w2m7Kzs+vkGKfytbZmVM4AAAAELsIZAAAAAEC11Gc4c+rx6pKvtTWjcgYAACBwEc4AAAAAAKrFHpaEhIQoPDy8To7h7XDGF9qaUTkDAAAQuAhnAAAAAADVYg9L4uLiZDAY6uQY3g5nqJwBAABAXSKcAQAAAABUi3M4U1e8Ec4w5wwAAADqC+EMAAAAAMBtNptNWVlZkgIvnKFyBgAAAPWFcAYAAAAA4Lbc3FxZrVZJgR3OMOcMAAAA6hLhDAAAAADAbc5BSSCHM1TOAAAAoC4RzgAAAAAA3BbI4Yx9zpmQkBCFhITUyzErQ+UMAABA4CKcAQAAAAC4LZDDGXsA4gtVM5IUEREhg8EgiXAGAAAg0BDOAAAAAADc5o1wJjMzs86O48wegPjCfDOSZDAYHGOhrRkAAEBgIZwBAAAAALitvsKZ+Pj4co9Zl+xtzXylckY62dqMyhkAAIDAQjgDAAAAAHAbbc3ql30sVM4AAAAEFsIZAAAAAIDb6iucCQkJUURERJlj1pXi4mIVFRVJ8q1whsoZAACAwEQ4AwAAAABwW32FM877r49wxjn88JU5Z6STQVFBQYGsVquXRwMAAABPIZwBAAAAALgtUMMZ+3wzkm9WzkhUzwAAAAQSwhkAAAAAgNu8Ec5kZ2erpKSkTo/lHHz4UjjjPBbmnQEAAAgchDMAAAAAALd5I5yx2WzKzs6u02P5alszKmcAAAACE+EMAAAAAMBtzuFMfHx8nR7LOfyp69ZmVM4AAACgPhHOAAAAAADcZg9JQkJCFB4eXqfHqs9whjlnAAAAUJ8IZwAAAAAAbrOHJHFxcTIYDHV6LCpnXMdCOAMAABA4CGcAAAAAAG5zDmfqmrfCGV+dc4a2ZgAAAIGDcAYAAAAA4Babzea1cCYzM7NOj0XlDAAAAOoT4QwAAAAAwC15eXmyWq2S6ieciY+PdywH65wzzmOhcgYAACBwEM4AAAAAANziHJAEclszXwpnnNuaUTkDAAAQOAhnAAAAAABuCZZwxpfmnKFyBgAAIDARzgAAAAAA3BLI4YyvtjWjcgYAACAwEc4AAAAAANySmZnpWA60cMZX25pROQMAABCYCGcAAAAAAG4J5MoZX21rRuUMAABAYCKcAQAAAAC4JVjCGSpnAAAAUNcIZwAAAAAAbqnvcCYkJEQRERFljl0XmHMGAAAA9YlwBgAAAADglvoOZ5yPE6xtzaicAQAACEyEMwAAAAAAtwRDOBMaGiqz2Vynx6oOk8mk8PBwSVTOAAAABBLCGQAAAACAW7wZzmRnZ6ukpKTOjmMPPnyppZmdfUyEMwAAAIGDcAYAAAAA4BZvhjM2m01ZWVl1dhz7nDO+GM7Y552hrRkAAEDgIJwBAAAAALjFG+FMfHx8ucf3NCpnAAAAUJ8IZwAAAAAAbrGHI2azWREREfVyTOcQqD7CmcjIyDo7Rk05hzM2m83LowEAAIAnEM4AAAAAANxiD0fi4uJkMBjq5Zj1Ec5YLBYVFxdL8s3KGXtbM5vNpvz8fC+PBgAAAJ5AOAMAAAAAcItzOFNf6iOccW4X5ovhjPOYmHcGAAAgMBDOAA
AAAACqZLPZCGe8xF45IzHvDAAAQKAgnAEAAAAAVCkvL09Wq1VSYIczvjznjETlDAAAQKAgnAEAAAAAVMk5GAm0cCYvL8+xTOUMAAAA6gPhDAAAAACgSoEczvh6WzMqZwAAAAIP4QwAAAAAoErBEs74YlszKmcAAAACD+EMAAAAAKBKwRLOUDkDAACA+kA4AwAAAACoUiCHM8w5AwAAgPpGOAMAAAAAqJIvhDOZmZl1cgx/qpwhnAEAAAgMXg9nXn31VbVv317h4eFKTExUampqpduvXbtWiYmJCg8PV4cOHfT666+7PH/55ZfLYDCU+Rk8eLBjm8cee6zM882aNauT9wcAAAAAgcBb4UxISIhjHhjmnKGtGQAAQKDwajizYMECTZw4UQ899JC2bNmiXr16aeDAgdq7d2+52+/evVuDBg1Sr169tGXLFk2dOlX33HOPFi9e7NgmOTlZaWlpjp8ff/xRJpNJw4cPd9lX165dXbb74Ycf6vS9AgAAAIA/81Y443w85pyhcgYAACBQmL158Oeee0633nqrxo4dK0maOXOmVqxYoddee03Tp08vs/3rr7+uNm3aaObMmZKkLl26aOPGjXrmmWc0bNgwSVLDhg1dXjN//nxFRkaWCWfMZjPVMgAAAADgJm+HM2lpaUE754zzmKicAQAACAxeC2eKioq0adMmTZ482WV9//799c0335T7mvXr16t///4u66688krNnj1bFotFISEhZV4ze/ZsXX/99WUusH///Xe1aNFCYWFhuuCCC/TEE0+oQ4cOFY63sLBQhYWFjsdZWVmSJIvFIovFUvmbdYN9H57YFwB4EucnAL6K8xNQv44dO+ZYjoyMLPO7Zw0L88hxyvudjo2NlSRlZ2eroKBAJpPJI8eyy87OdiyHhYVVeV6p6r2WnHi+pIrt3D1/hTntJzs7m/MegBrj+gmArwqU81N1xu+1cObIkSOyWq1KSEhwWZ+QkKD09PRyX5Oenl7u9sXFxTpy5IiaN2/u8tx3332nH3/8UbNnz3ZZf8EFF+j999/XaaedpoMHD+o///mPLr74Yv30009q1KhRuceePn26Hn/88TLrV65c6dGexCkpKR7bFwB4EucnAL6K8xNQP3777TfH8qZNm7R7927XDaaV/bxUEzuXLSuzrqioyLG8ePFilzlYPOGXX35xLG/cuFFHjhyp/AVuvtfdD02t9Pny3mt5Dh8+7Fj+448/tMzN1wFARbh+AuCr/P385FyRXRWvtjWTJIPB4PLYZrOVWVfV9uWtl0qrZrp166bzzz/fZf3AgQMdy2eddZYuuugidezYUe+9954mTZpU7nGnTJni8lxWVpZat26t/v37O+7iqg2LxaKUlBT169ev3AogAPAWzk8AfBXnJ6B+Pf/8847l6667rkx3gl97nueR45y+8fsy6+bMmaOtW7dKKr3Zrm3bth45lt28efMcywMGDKi0q4JU9XstCQvT7oemqv1/n5DRqQPDqcp7r+U5evSoYzk2NlaDBg1y63UAcCqunwD4qkA5P9k7brnDa+FM48aNZTKZylTJHDp0qEx1jF2zZs3K3d5sNpepeMnLy9P8+fM1bdq0KscSFRWls846S7///nuF24SFhbmUktuFhIR49C+Lp/cHAJ7C+QmAr+L8BNQP+wdNk8mkuLi4MjfImSoJIaqjvN/nBg0aOJbz8vI8/jufn5/vWI6Pj69y/+6+V2NhYaXbuvs+4uPjHcu5ubmc8wDUGtdPAHyVv5+fqjN2Yx2Oo1KhoaFKTEwsU6aUkpKiiy++uNzXXHTRRWW2X7lypXr27FnmTS9cuFCFhYX6+9//XuVYCgsLtWPHjjJt0QAAAAAApY4fPy5J5QYzdS0uLq7MODwpNzfXsezJttWeEhoaKrO59N5K57ECAADAf3ktnJGkSZMmadasWXr77be1Y8cO3Xvvvdq7d6/GjRsnqbSV2I033ujYfty4cfrzzz81adIk7dixQ2+//bZmz5
6t+++/v8y+Z8+erWuvvbbcOWTuv/9+rV27Vrt379aGDRuUlJSkrKws3XTTTXX3ZgEAAADAjzmHM/Ut2MMZg8HgaCOXk5Pj5dEAAADAE7w658zIkSOVkZGhadOmKS0tTd26ddOyZcsc/YPT0tK0d+9ex/bt27fXsmXLdO+99+qVV15RixYt9OKLL2rYsGEu+/3tt9/01VdfaeXKleUed//+/Ro1apSOHDmiJk2a6MILL9S3337r8b7FAAAAABAIbDabIxRxbrFVX+o6nLFP3BoeHi6TyeTx/XtCdHS0jh8/TuUMAABAgPBqOCNJ48eP1/jx48t97t133y2zrnfv3tq8eXOl+zzttNNks9kqfH7+/PnVGiMAAAAABLP8/HwVFxdLCuzKGXt1ii+icgYAACCweLWtGQAAAADA9zkHIt4OZzIzMz2+f3s444stzeyio6MlMecMAABAoCCcAQAAAABUypfCmWCvnCkuLlZRUZGXRwMAAIDaIpwBAAAAAFTK2+GM8zw3ng5nbDabY84ZXw5n7JUzEq3NAAAAAgHhDAAAAACgUt4OZ+qycqaoqEhWq1WSb4czzmOjtRkAAID/I5wBAAAAAFQqkMMZ56DDl+eccQ5nqJwBAADwf4QzAAAAAIBKeTuciY2NLXcsnmBvaSb5duWMc1szKmcAAAD8H+EMAAAAAKBS3g5nQkJCHFUtdVk548vhDJUzAAAAgYVwBgAAAABQqczMTMeyN8IZ5+MGazhD5QwAAEBgIZwBAAAAAFTK25UzzsdlzhkqZwAAAAIB4QwAAAAAoFK+FM5kZ2fLarV6bL/MOQMAAABvIJwBAAAAAFTKl8IZqTSg8RR/aWtG5QwAAEBgIZwBAAAAAFTK18IZT7Y285e2ZlTOAAAABBbCGQAAAABApexhiMlk8lqA4RzOZGZmemy/VM4AAADAGwhnAAAAAACVsoczcXFxMhgMXhlDXVXOMOcMAAAAvIFwBgAAAABQKedwxlvi4+Mdy3XV1syXwxnnsRHOAAAA+D/CGQAAAABAhWw2m0+EM8w5c7JyhrZmAAAA/o9wBgAAAABQofz8fBUXF0sKzHDGX9qaUTkDAAAQWAhnAAAAAAAVcg5CAjGc8ce2ZlTOAAAA+D/CGQAAAABAhQhnfENERIQMBoMkKmcAAAACAeEMAAAAAKBCwRTO+PKcM0aj0TE+KmcAAAD8H+EMAAAAAKBCgR7O+MucM5IUHR0ticoZAACAQEA4AwAAAACoUKCHM85BR0REhMf2Wxfs4RGVMwAAAP6PcAYAAAAAUCFfCWdiY2Mdy3URzkRERMho9O2PyFTOAAAABA7fvvIEAAAAAHiVr4QzISEhjjlX6iKc8fWWZtLJMRYUFMhqtXp5NAAAAKgNwhkAAAAAQIV8JZxxPn5mZqbH9mmfc8Yfwhl75YxE9QwAAIC/I5wBAAAAAFTIF8OZYK+ckZh3BgAAwN8RzgAAAAAAKuRL4Ux8fLyk0mDCE229bDabI5yxt0zzZVTOAAAABA7CGQAAAABAhXwpnHE+flZWVq33V1BQIJvNJsn/KmcIZwAAAPwb4QwAAAAAoEK+Gs54orWZfb4ZyT/CGefKGdqaAQAA+DfCGQAAAABAhewhiMlk8nqA4elwxrn6xNvvzR1UzgAAAAQOwhkAAAAAQIXsIUhsbKwMBoNXx1KX4Yw/zDnjHM5QOQMAAODfCGcAAAAAABWyhyDebml26hiCva0ZlTMAAAD+jXAGAAAAAFAum80W0OGMP7c1o3IGAADAvxHOAAAAAADKVVBQIIvFIolwxhdQOQMAABA4CGcAAAAAAOVyDkACPZxhzhkAAADUJ8IZAAAAAEC5Aj2cYc4ZAAAAeAvhDAAAAACgXM4BSHx8vPcGcoJzOJOZmVnr/flbWzMqZwAAAAIH4QwAAAAAoFyBXjnjb23NqJwBAAAIHIQzAAAAAIBy+Vo441y94+lwxt8qZwhnAAAA/BvhDAAAAACgXL4WzsTGxjqWg3
3OGdqaAQAA+DfCGQAAAABAuXwtnDGbzY4QhcoZKmcAAAD8GeEMAAAAAKBcvhbOSCfHEYxzzphMJoWHh0uicgYAAMDfEc4AAAAAAMoV6OGMv7U1k06Ok8oZAAAA/0Y4AwAAAAAoly+HMzk5ObJarbXal7+1NZNOjpPKGQAAAP9GOAMAAAAAKJcvhzOSlJWVVat9+WM4Ex0dLYnKGQAAAH9HOAMAAAAAKFdmZqZj2RfDmdq2NrMHHAaDwTGXi69zbmtms9m8PBoAAADUFOEMAAAAAKBc9vDDZDL5TGWJJ8MZ+5wzkZGRMhgMtdpXfbFXzthsNuXn53t5NAAAAKgpwhkAAAAAQLns4UdsbKzPhBd1UTnjK8GTO5zHyrwzAAAA/otwBgAAAABQLnv44SstzSTXsTi3XasJezgTGRlZq/3UJ3vljMS8MwAAAP6McAYAAAAAUIbNZvP5cIbKGSpnAAAA/BXhDAAAAACgjIKCAlksFkm+Fc7Ex8c7lmsTzthsNsecM/4UzlA5AwAAEBgIZwAAAAAAZTgHH74UzniqciY/P9+x7E/hjPNYCWcAAAD8F+EMAAAAAKCMQA9nnIMNf51zhrZmAAAA/otwBgAAAABQRjCFM1TOAAAAoL4RzgAAAAAAygj0cMY+34zkX+EMlTMAAACBgXAGAAAAAFBGoIczVM4AAADAmwhnAAAAAABl+Go4Exsb61gOxjlnnMMZKmcAAAD8F+EMAAAAAKAMXw1nzGazI6AI9rZmVM4AAAD4L8IZAAAAAEAZvhrOSCfHE+xtzaicAQAA8F+EMwAAAACAMoIpnPGntmZUzgAAAAQGwhkAAAAAQBn+EM7k5OSouLi4RvugcgYAAADeRDgDAAAAACjDH8IZScrKyqrRPphzBgAAAN5EOAMAAAAAKMOXw5n4+HjHck1bm1E5AwAAAG8inAEAAAAAlGEPPYxGo0u1hi9wDos8Ec7405wzoaGhMpvNkqicAQAA8GeEMwAAAACAMuyhR2xsrAwGg5dH48rT4Yw/Vc4YDAbHeAlnAAAA/BfhDAAAAACgDHvo4WstzSTPhDP+OueMdHLeGdqaAQAA+C/CGQAAAABAGYEezvhr5YwkKmcAAAACAOEMAAAAAMBFQUGBioqKJAVHOONPc85IVM4AAAAEAsIZAAAAAIAL58AjUMMZf25rZh9vcXGxI0QDAACAfyGcAQAAAAC4CIZwxl45YzQaFRYW5pFx1RfnMInqGQAAAP9EOAMAAAAAcBFM4UxUVJQMBoNHxlVf7G3NJOadAQAA8FeEMwAAAAAAF8EUzvjbfDMSlTMAAACBgHAGAAAAAOAiGMIZ+5wz/jbfjETlDAAAQCAgnAEAAAAAuPD1cCY2NtaxnJmZWaN9OLc18zdUzgAAAPg/whkAAAAAgAvncCY+Pt57A6mA2Wx2VI/UpHKmpKRE+fn5kvyzrRmVMwAAAP6PcAYAAAAA4MLXK2ekk+OqSThjb2kmUTkDAAAA7yCcAQAAAAC4IJzxbVTOAAAA+D/CGQAAAACAC38KZ3Jzc1VcXFyt1zoHGv4YzjiPmXAGAADAP3k9nHn11VfVvn17hYeHKzExUampqZVuv3btWiUmJio8PFwdOnTQ66+/7vL8u+++K4PBUOanoKCgVscFAAAAgGDhT+GMJGVlZVXrtc6Bhr/POUNbMwAAAP/k1XBmwYIFmjhxoh566CFt2bJFvXr10sCBA7V3795yt9+9e7cGDRqkXr16acuWLZo6daruueceLV682GW72NhYpaWlufyEh4fX+LgAAAAAEEz8LZypbmszKmcAAADgbV4NZ5577jndeuutGjt2rLp06aKZM2eqdevWeu2118rd/vXXX1ebNm00c+ZMdenSRWPHjtUtt9yiZ555xmU7g8GgZs2aufzU5rgAAAAAEEzsYYfRaHSp0vAltQlnAmnOGSpnAAAA/JPZWwcuKirSpk2bNHnyZJf1/fv31zfffFPua9avX6/+/fu7rLvyyi
s1e/ZsWSwWhYSESCq9OG3btq2sVqvOOecc/fvf/1aPHj1qfFxJKiwsVGFhoeOxvWzeYrHIYrG4+a4rZt+HJ/YFAJ7E+QmAr+L8BNSdzMxMSaVdCdydz8UaFuaRY7v7O+0cUGRkZFTrXOAc5oSHh1f7PFLVey058XxJFdvV9PwVGhrqWM7OzuY8CMBtXD8B8FWBcn6qzvi9Fs4cOXJEVqtVCQkJLusTEhKUnp5e7mvS09PL3b64uFhHjhxR8+bNdcYZZ+jdd9/VWWedpaysLL3wwgu65JJLtG3bNnXu3LlGx5Wk6dOn6/HHHy+zfuXKlR7tUZySkuKxfQGAJ3F+AuCrOD8Bnnf48GFJUkhIiJYtW+bei6aV/bxUEzvdPN7Bgwcdy6tWrapWBcnXX3/tWP7zzz/df492br7X3Q9NrfR5d9/rqZw/u/7+++/VHz+AoMf1EwBf5e/nJ+cK7ap4LZyxMxgMLo9tNluZdVVt77z+wgsv1IUXXuh4/pJLLtG5556rl156SS+++GKNjztlyhRNmjTJ8TgrK0utW7dW//79FRsbW+Hr3GWxWJSSkqJ+/fo5KoAAwBdwfgLgqzg/AXUnPz9fktSsWTMNGjTIrdf82vM8jxz79I3fu7Xdnj179OGHH0qSOnXq5PY4pZPhkyT17NmzWq+Vqn6vJWFh2v3QVLX/7xMyOnVgOJW77/VUhw4dcizHxsZWe/wAghfXTwB8VaCcn+wdt9zhtXCmcePGMplMZapVDh06VKaqxa5Zs2blbm82m9WoUaNyX2M0GnXeeefp999/r/FxJSksLExh5ZSkh4SEePQvi6f3BwCewvkJgK/i/AR4VkFBgYqKiiRJ8fHxbv9+mSoJIarD3eM1bNjQsZybm1ut80BBQYFjOTY2ttrnEHffq7GwsNJta3ruio+Pdyzn5eVxDgRQbVw/AfBV/n5+qs7YjXU4jkqFhoYqMTGxTJlSSkqKLr744nJfc9FFF5XZfuXKlerZs2eFb9pms2nr1q1q3rx5jY8LAAAAAMHCeT6WuLg4L46kcs5js8+R467c3FzHsifbVNeXiIgIR+cH5/cCAAAA/+HVtmaTJk3SDTfcoJ49e+qiiy7Sm2++qb1792rcuHGSSluJHThwQO+//74kady4cXr55Zc1adIk3XbbbVq/fr1mz56tefPmOfb5+OOP68ILL1Tnzp2VlZWlF198UVu3btUrr7zi9nEBAAAAIFj5YzjjPGZ3OAcaUVFRHhtTfTEajYqMjFRubm615toBAACA7/BqODNy5EhlZGRo2rRpSktLU7du3bRs2TK1bdtWkpSWlqa9e/c6tm/fvr2WLVume++9V6+88opatGihF198UcOGDXNsk5mZqdtvv13p6emKi4tTjx49tG7dOp1//vluHxcAAAAAgpW/hDPOrb2qG844T9Tqj+GMJEVHRys3N5fKGQAAAD/l1XBGksaPH6/x48eX+9y7775bZl3v3r21efPmCvf3/PPP6/nnn6/VcQEAAAAgWPlLOBPMlTPSyXFTOQMAAOCfvB7OAAAAAAB8R7CFMzWZc2bElMo/SofJrEck3TzJrEJZK9zuh2of+aTo6GhJzDkDAADgr4zeHgAAAAAAwHf4SzgTGxvrWA7mypmCggJZrRUHQAAAAPBNhDMAAAAAAAd/CWdMJpOjeiRY55yxo3oGAADA/xDOAAAAAAAc/CWckU6OL5grZyTCGQAAAH9EOAMAAAAAcAimcMZkMikkJMTj46oPzpUzOTk5XhwJAAAAaoJwBgAAAADg4I/hTG5uroqLi91+nb2tWVRUlAwGQ52Mra5ROQMAAODfCGcAAAAAAA7+GM5IUlZWltuvs4cZ/trSTHIdO5UzAAAA/odwBgAAAADg4K/hTHVamwVCOOPc1ozKGQAAAP9DOAMAAAAAcLCHHAaDwSUA8EXO4UxmZqbbr7OHGZGRkZ4eUr2hcgYAAMC/Ec4AAAAAABzs4UxsbKyMRt/+yFiTyhmr1a
rCwkJJVM4AAADAe3z7ShsAAAAAUK/sIYevtzSTpPj4eMeyu+FMXl6eY9mfwxkqZwAAAPwb4QwAAAAAwMGfwpmaVM44V5n4c1szKmcAAAD8G+EMAAAAAECSVFhY6Gj5FQzhTKBUzhDOAAAA+B/CGQAAAACAJNeAI1DDmUBpa+ZcOUNbMwAAAP9DOAMAAAAAkBQc4QyVMwAAAPAFhDMAAAAAAEnBF84EypwzVM4AAAD4H8IZAAAAAICk4AhnAqWtGZUzAAAA/o1wBgAAAAAgKTjCmUBpa0blDAAAgH8jnAEAAAAASPK/cCY2NtaxHGzhDJUzAAAA/o1wBgAAAAAgyf/CGZPJ5KggCbY5Z0wmk8LCwiRROQMAAOCPCGcAAAAAAJL8L5yRTo4zMzPTre0DZc4Z6WRrMypnAAAA/A/hDAAAAABAkn+HM8HW1kw6OX4qZwAAAPwP4QwAAAAAQJJ/hjPx8fGSSitiLBZLldsHSlszicoZAAAAf2b29gAAAAAAAL7BOZyxhx6+zjlEysrKUqNGjSrd3hOVMz/s3lvp8xZjuJbFS+v/3KeQkoIaHcMd9vHn5ubKZrPJYDDU2bEAAADgWVTOAAAAAAAk+WfljPM43WltFohzzthsNuXn53t5NAAAAKgOwhkAAAAAgKST4YbBYHB88e/rqhvOBOKcMxKtzQAAAPwN4QwAAAAAQNLJcCM2NlZGo398XKxNOBMoc85IUk5OjhdHAgAAgOryj6ttAAAAAECds4cb/tLSTKJyxo7KGQAAAP9COAMAAAAAkBQc4Yx9zpmQkBCFhITU2bjqA5UzAAAA/otwBgAAAACgoqIiFRQUSArscMZeYeLvVTMSlTMAAAD+jHAGAAAAAOASbARDOOPv881IVM4AAAD4M8IZAAAAAEDQhDP2tmZUzgAAAMCbCGcAAAAAAMrMzHQs+2s44/weKhKobc2onAEAAPAvhDMAAAAAgKConCkuLlZRUZGkwAhnnNuaUTkDAADgXwhnAAAAAAB+G87Ex8c7lqsKZ5wDjECYc4bKGQAAAP9FOAMAAAAA8NtwJiYmxrFcVThjn29GonIGAAAA3kU4AwAAAADw23DGZDI5AprqVM4EQjhD5QwAAID/IpwBAAAAAPhtOCOdHG+wtTWjcgYAAMB/Ec4AAAAAAIIunAm0yhnCGQAAAP9COAMAAAAACIhwJi8vTxaLpcLtAnnOGdqaAQAA+BfCGQAAAABAQIQzkpSVlVXhdlTOAAAAwFcQzgAAAAAAAiacqay1WaDNORMaGiqz2SyJyhkAAAB/QzgDAAAAAHCEGgaDQTExMV4eTfW4G84EWlszg8HgeB9UzgAAAPgXwhkAAAAAgCPUiImJkdHoXx8Va1I5EwjhjHRy3hkqZwAAAPyLf11xAwAAAADqhD3U8LeWZpLrmDMzMyvcLhDDGSpnAAAA/BPhDAAAAAAgYMKZYJpzRjoZzlA5AwAA4F8IZwAAAAAgyBUVFamgoECSf4Yz8fHxjuVgmnNGOtnWrLi4WEVFRV4eDQAAANxFOAMAAAAAQc450PDHcCaY55xxfh9UzwAAAPgPwhkAAAAACHLBGM4ESlsze+WMxLwzAAAA/oRwBgAAAACCXDCGM1TOAAAAwJsIZwAAAAAgyAVLOBPIc85IVM4AAAD4E7O3BwAAAAAA8K5gCWcCsa2Zc8hEOANUn9VqVWpqqtLS0tS8eXP16tVLJpPJ28MCAAQBwhkAAAAACHL+Hs7ExMQ4lt0JZ0JDQ2U2B8bHYefKGdqaAdWTnJysCRMmaP/+/Y51rVq10gsvvKChQ4d6cWQAgGBAWzMAAAAACHL+Hs6YTCZHQONOOBMoLc0kKmeAmkpOTlZSUpJLMCNJBw4cUFJSkpKTk700MgBAsCCcAQAAAIAg5+/hjHRy3O7MORNI4QyVM0D1Wa1WTZgwQTabrcxz9nUTJ06U1Wqt76EBAIII4QwAAAAABLlgCWeonAEgSampqWUqZpzZbDbt27dPqa
mp9TgqAECwCYwmuwAAAACAGgukcCYvL08Wi0UhISFltrGHF5GRkfU6trpE5QxQtR1ndHF5vDEry63XbRw9RgmxsY7HXX7Z4dFxAQCCG5UzAAAAABDkAimckcqvnrFYLCouLpZE5QwQ7JqYTR7dDgCAmqByBgAAAACCnCfCmRFTPPPx8ocavi4+Pt6xfPz4cTVu3NjleefgIlDDGSpnAPckRkQqwWzWwROB7akMkhLMZiVGBE6VHQDA91A5AwAAAABBzjmciYmJ8eJIaq6qyplADWec25pROQO4x2QwaGrThEq3mdI0QSaDoZ5GBAAIRoQzAAAAABDk7GFGTEyMTCb/bONTnXAmkOacoXIGqJlLoqIUWsFztzdspH5+GlQDAPwH4QwAAAAABDl7mOHcGszfVBXO5OXlOZapnAGwOidHRSeWe0dF6eYGDRzPVdTuDAAATyKcAQAAAIAgZw8zajrfjC8I1rZmVM4ANbM06+R54v8aNtSExk0UbSz9mux/Odkqstm8NTQAQJAgnAEAAACAIGaxWJSfny8peMKZQGprFhERIcOJeTGonAHck2m16qsTvy8JZrMSIyIVZjTqbycq0bJLSrSe3ycAQB0jnAEAAACAIOYcZARLOBNIlTNGo9ERNhHOAO5ZmZ0te+OygTGxMp0IOAfExDq2WZ6d5YWRAQCCCeEMAAAAAASxYAlnAnXOGenkvDO0NQPc49zS7KrYk4HMxZGRinG0NstRUUlJvY8NABA8CGcAAAAAIIgFSzgTqJUz0sn3Q+UMULU0i0UbT7RybB8aqi5hYY7nQo1G9bWHnSUl+jqP3ykAQN0hnAEAAACAIBaM4UwgzTkjUTkDVMcX2VmynVgeFBPrmLPJ7kqX1mbZ9TgyAECwIZwBAAAAgCAWiOFMZmZmmecDua2Z/f0UFBTIarV6eTSAb1uWdXIumcFOLc3sLoqKUuyJ1marcnJUSGszAEAdIZwBAAAAgCAWKOFMTEyMYznY2prZK2ckWpsBldlVWKifCwslSd3Cw9UuNLTMNqEGg/pGl55PcktKlMrvFACgjhDOAAAAAEAQc64y8edwxmQyOQKaYAtnnN8P4QxQsaXZTlUzMWWrZuwGxJ4Me1fQ2gwAUEcIZwAAAAAgiAVK5YwkxcfHSwq+OWecwxnmnQHKZ7PZtPRESzODpIFOAcypLox0bm2WrQJamwEA6gDhDAAAAAAEsUAKZ+zjLy+cCeQ5Z2hrBlTtx4IC7bVYJEnnR0aqqTmkwm1DDAb1O1GJl2+z0doMAFAnCGcAAAAAIIgFYjiTn58vy4kvYe2Cpa0ZlTNA+Za52dLMboDTNsudXgsAgKcQzgAAAABAEAvEcEYqWz0TyG3NqJwBKme12bQsq3TuGLPkqIqpzPmRkYo3mSRJa3JylE9rMwCAhxHOAAAAAEAQI5zxf1TOAJX7Pi9Ph63FkqTLoqMVdyJ0qUyIwaB+J4LPfJtN63L53QIAeBbhDAAAAAAEMecQI8aNu8l9WWXhjH3OmfDwcJnc+GLWn1A5A1RuaTVbmtld6dLaLNujYwIAgHAGAAAAAIKYPcSIiYnx+9DCncqZQJtvRnJ9T4QzgKvCwkKtPBGsRBqMutwpzKzK+ZGRanDivLg2J4ffLwCAR3k9nHn11VfVvn17hYeHKzExUampqZVuv3btWiUmJio8PFwdOnTQ66+/7vL8W2+9pV69eqlBgwZq0KCBrrjiCn333Xcu2zz22GMyGAwuP82aNfP4ewMAAAAAX2cPMfy9pZnkXjgTaC3NJNfKGdqaAa6WL1+u7BPzxfSNiVaE0f2vwswGg/pFl1YUFthsWrp0aZ2MEQAQnLwazixYsEATJ07UQw89pC1btqhXr14aOHCg9u7dW+72u3fv1qBBg9SrVy9t2bJFU6dO1T333KPFixc7tlmzZo1GjRql1atXa/369WrTpo369++vAwcOuOyra9euSktLc/z88MMPdfpeAQAAAMAXBUs4Y29rRuUMEF
zmzp3rWK5OSzO7AU7tHhcuXOiRMQEAIElmbx78ueee06233qqxY8dKkmbOnKkVK1botdde0/Tp08ts//rrr6tNmzaaOXOmJKlLly7auHGjnnnmGQ0bNkyS9OGHH7q85q233tKiRYv0v//9TzfeeKNjvdlsploGAAAAQFCzWCzKz8+XFHjhTGZmpmPZZrMFdFszKmeA8mVnZ+uzzz6TJDUwmXRRDX7/e0ZGqpHJpAyrVUuXLlVOTo7L7xwAADXltXCmqKhImzZt0uTJk13W9+/fX9988025r1m/fr369+/vsu7KK6/U7NmzZbFYFBISUuY1eXl5slgsatiwocv633//XS1atFBYWJguuOACPfHEE+rQoUOF4y0sLFRhYaHjcVZW6WRyFotFFoul8jfrBvs+PLEvAPAkzk8AfBXnJ6D2jhw54liOjY2t1e9TmMI8MaRajcE5eDl69KhjX4WFhbJarZJK25rV+rxhDK/0acuJ5y1VbCcPnb9CQ0Mdy9nZ2ZwXgRMWLVqkgoICSVL/+HgZw8NlreY+DJKuiI/XgowMFRQU6JNPPtHIkSM9Ptb6wvUTAF8VKOen6ozfa+HMkSNHZLValZCQ4LI+ISFB6enp5b4mPT293O2Li4t15MgRNW/evMxrJk+erJYtW+qKK65wrLvgggv0/vvv67TTTtPBgwf1n//8RxdffLF++uknNWrUqNxjT58+XY8//niZ9StXrvRoz+KUlBSP7QsAPInzEwBfxfkJqLm0tDTHcm5urpYtW1bjfT0S/4gnhlSrMfz888+O5a1btzr2lX1iMnCp9u9TktT9Tbc2Sznrxco3qO04Tvjrr78cy7/++mvt3x8QIF5++WXHcvf77tPOM8+s0X66/fCDFjxSeo575ZVXFOPU6sxfcf0EwFf5+/nJ3krXHV5tayZJBoPB5bHNZiuzrqrty1svSU8//bTmzZunNWvWKDz85B1LAwcOdCyfddZZuuiii9SxY0e99957mjRpUrnHnTJlistzWVlZat26tfr376/Y2Or3LD2VxWJRSkqK+vXrV24FEAB4C+cnAL6K8xNQe1u2bHEsn3HGGRo0aFCN93XR3Is8MSStH72+xq9t3bq1HnroIUlSo0aNHO9n//79jm3atm1bq/cpSZreqtKnLcZwpZz1ovr9cI9CSgoq3nDK/oqfqwbncCY+Pr727w8IAIcPH9a2bdskSc1DQjR43nwZK/m+qTLtbDbNNJuVUVzsmDPZXwMarp8A+KpAOT/ZO265w2vhTOPGjWUymcpUyRw6dKhMdYxds2bNyt3ebDaXqXh55pln9MQTT+jLL7/U2WefXelYoqKidNZZZ+n333+vcJuwsDCFhZUt0w8JCfHoXxZP7w8APIXzEwBfxfkJqDnnyeMbNGhQq9+lQhVWvZEbajOGxo0bO5azs7Md+yoqKnKsj4mJqf05o7LAxUlISUHl4YyHzl3x8fGO5by8PM6JgKRPPvnE0c5wcEyMQpzOA9VlknRldLTmZmaqsLBQy5cv1+jRoz00Uu/g+gmAr/L381N1xm6sw3FUKjQ0VImJiWXKlFJSUnTxxReX+5qLLrqozPYrV65Uz549Xd70jBkz9O9//1vLly9Xz549qxxLYWGhduzYUW5bNAAAAAAIVMePH3csx8XFeXEknuH8Hpzfm3MIFVWDCcF9nfN7cn6vQDCbO3euY3lwTO07ngxw2sfChQtrvT8AALwWzkjSpEmTNGvWLL399tvasWOH7r33Xu3du1fjxo2TVNpK7MYbb3RsP27cOP3555+aNGmSduzYobfffluzZ8/W/fff79jm6aef1sMPP6y3335b7dq1U3p6utLT05WTk+PY5v7779fatWu1e/dubdiwQUlJScrKytJNN91Uf28eAAAAALws0MKZmJgYR8vrisIZT84Z6ivMZrOj04PzZ18gWO3Zs0dff/21JKlr1646rZxOKNXVIyLCcVPvF198Ua22NQAAlMer4czIkSM1c+
ZMTZs2Teecc47WrVunZcuWqW3btpJKJ6fcu3evY/v27dtr2bJlWrNmjc455xz9+9//1osvvqhhw4Y5tnn11VdVVFSkpKQkNW/e3PHzzDPPOLbZv3+/Ro0apdNPP11Dhw5VaGiovv32W8dxAQAAACAYBFo4YzQaHfNAOL8354lZA7FyRpKio6MlUTkDSNL8+fMdy6NGjap0bmN3mQwGJSUlSSptlfjZZ5/Vep8AgODmtTln7MaPH6/x48eX+9y7775bZl3v3r21efPmCve3Z8+eKo/p/I80AAAAAASrQAtnpNL3kZWVFVRtzaTS95WRkUE4A0iaN2+eY3nUqFEqnPOBR/Y7YsQIvfTSS5JKW5v9/e9/98h+AQDByauVMwAAAAAA7wnUcEYKrrZm0snKGdqaIdj9+OOP2r59uyTpwgsvVIcOHTy274svvlgtWrSQJK1YsUKZmZke2zcAIPgQzgAAAABAkArkcCY/P18Wi0VS8FTOSKXv1WazeXk0gPc4V82MHj3ao/s2Go0aPny4JFqbAQBqj3AGAAAAAIJUIIcz0sn3F0xzzthsNuXn53t5NIB32Gw2zZ07V1JpkDJixAiPH8N5nwsXLvT4/gEAwYNwBgAAAACCVKCHM/aWQ8FUOSOJeWcQtL799lvHXMR9+/ZVQkKCx49x4YUXqlWrVpKklStX6tixYx4/BgAgOBDOAAAAAECQcg5nYmNjvTgSzymvciaY5pyRmHcGwasuW5rZObc2s1gs+vTTT+vkOACAwGf29gAAAAAAAN5hDy+io6NlMpm8PBrPqI+2Zu0K5lb6fJjJpqdlVbfC2Sq0Gircbk+tR3JSRESEY3nNmjVq06ZNwPw/BdxRXFysBQsWSJLCwsJ03XXX1dmxRowYoeeff15SaWuzm2++uc6OBQAIXFTOAAAAAECQsocXgdLSTJLi4+Mdy+VVzgRiW7Pk5GR99NFHjse33HKL2rVrp+TkZC+OCqhfq1at0qFDhyRJV111VZ2e1y644AK1adNGkpSSkqKjR4/W2bEAAIGLcAYAAAAAglQghjNVtTULtHAmOTlZSUlJZVqZHThwQElJSQQ0CBpz556saKurlmZ2BoPB0dqsuLhYn3zySZ0eDwAQmGhrBgAAAABByGKxONp9eSKc+WH33lrvwxOCac4Zq9WqCRMmyGazlXnOZrPJYDBo4sSJGjJkCC3OENDy8/MdQWRsbKwGDRpU58ccMWKEnn32WUmlrc1uueWWOj8mACCwUDkDAAAAAEEoKyvLsRzolTOennPGV6Smpmr//v0VPm+z2bRv3z6lpqbW46iA+rds2TJlZ2dLkoYOHarw8PA6P+Z5552ntm3bSpK+/PJLZWRk1PkxAQCBhcoZAAAAAAhC9uBCcp2nxd9VVTkTERFR72PylB1ndHF5vNEpYKvMqpEjldCgoeNxl192eHRcgLfVZ0szO4PBoBEjRmjGjBmyWq36+OOPNXbs2Ho5NgAgMFA5AwAAECCsVqvWrFmjefPmac2aNbJard4eEgAf5hzOBHrljD2ciYiIkNEYOB+Dm5jda1X25KFDevrQIR0rLq7jEQH1LzMzU0uXLpUkJSQkqE+fPvV27BEjRjiWFy5cWG/HBQAEhsC5KgUAAAhiycnJateunfr06aPRo0erT58+ateuHRNBA6hQMIYzgdTSTJISIyKVYDbLUMV2FknvHjuqfrt26aUjh5WZmVkPowPqx8cff6zCwkJJ0siRI2U211+TmMTERLVv316StGrVKh0+fLjejg0A8H+EMwAAAH4uOTlZSUlJZeYdOHDggJKSkghoAJQrmMIZ+5wzgRbOmAwGTW2aIEllAhr748ujohRqKH2UZyvRaxkZ6tChg6ZPn66cnJz6GyxQR7zR0szO3tpMkqO1GQAA7iKcAQAA8GNWq1UTJkyQzWYr85x93cSJE2lxBqCMQA1nYmJiZDgRRgR65Ywk9YuJ0cwWLdX0lGqBBLNZL7RoqVdbtdaK9h10fX
y8Y9LZY8eOaerUqerYsaNmzpypgoKC+h844AHp6elatWqVJKlDhw46//zz630MtDYDANQU4QwAAIAfS01NLVMx48xms2nfvn1KTU2tx1EB8AeBGs4YjUbFxMRIKp2LwmazOcKZyMhIbw6tzvSLidGXHTrq3datNaN5C73burVSOnRUvxN/DgkhIfpXQjMta99B18bGOebdOXTokO6991516tRJb7zxhiwWizffBlBtCxcuVElJiSRp1KhRjmC2PvXo0UMdO3aUJK1evVqHDh2q9zEAAPxT/TXiBAAAQK3tOKOLy+ONWVluvW7j6DFKiI11PO7yyw6PjguA/wnUcEYqfT9ZWVk6fvy4CgsLHZWEgVg5Y2cyGHR+ZOXvr1VoqJ5o3lzTP/lWjz32mBYsWCCptA3muHHj9PTTT+vRRx/VmDFjZDKZ6mPYQK2429JsxBTPfP31Qznr7K3Npk+frpKSEiUnJ2vcuHEeOR4AILBROQMAAODHfizId2u7JVnHlU1rMwBOAj2ckUrfo71qRgrscKY6zjjjDM2fP19bt27V1Vdf7Vi/a9cu3XTTTTrrrLO0aNEiR0UC4It27typDRs2SJK6d++uM88802tj8ZfWZlarVWvXrtW6deu0du1a2t4CgJcRzgAAAPihYptN0w8d1HvHjrm1/brcXF2zZ7fWMvkzgBMCOZyJj4+XJBUUFOiY03mScMZV9+7d9dlnn+nbb79Vv379HOt37Nih4cOHKzExUUuXLi13XjPA2+bNm+dYrqxqpj50795dnTt3liStXbtW6enpXh1PeZKTk9WuXTv169dPzz33nPr166d27dopOTnZ20MDgKBFOAMAAOBnsq1Wjd+/X3NOCWZO7bJufxx6YulgcbHuOLBfD6b9pYyMjLofKACflpmZ6VgOtHDG+f2kpaU5lgN1zpnauuCCC7Ry5UqtWbNGl1xyiWP91q1bddVVV+mSSy5xTLoezKxWq9asWaN58+ZpzZo1VB14kc1mc2lpdv3113txNCdbm0lytDbzJcnJyUpKSiozT+GBAweUlJTkc+MFgGBBOAMAAOBHhtxq1WV5e/VV3ok2PSapxf+1UOu7WsvUwLWfurlhiFrf1Vrtnu2s6G7RjvVLsrJ05plnavHixfU5dAA+JpArZ5zfz19//eVYpnKmcr1791Zqaqq++OILJSYmOtavX79effv2Vd++fbV+/XqX1wRLYGGvOujTp49Gjx6tPn36UHXgRdu3b9eOHaXz51166aVq06aNl0fku63NrFarJkyYUG4FnH3dxIkTA/Z3FwB8GeEMAACAn1i7dq12Pb5LhX8VSpJMUSa1/2d7NezdUHE943T6s6er3YPt1GpcK7V7sJ1Oe+Y0xfWMU2ijULW9r61ajm0pY2Tp5d+hQ4eUlJSkpKQkHTx40JtvC4CXOIczsbGxXhyJ51VUOUM4UzWDwaABAwbo+++/V3Jysrp27ep4btWqVbr44ot11VVXacuWLUETWFB14Hucq2a83dLM7qyzztLpp58uSVq3bp3LucebUlNTy/zddWaz2bRv3z6lpqbW46gAABLhDAAAgF+YPXu2+vXrJ2tu6V2NYc3D1OHRDoo64+QXjQajQdFdohV/Ybyiu0TLYDzZ6MxgMKjBpQ3U+YnOiukR41i/ePFinXnmmfrggw+YUwAIMvZwJjo6WiaTycuj8ayKKmdoa+Y+g8Gg6667Ttu2bdMHH3ygjh07Op5bunSpzj33XA0bNizgAwuqDnxPSUmJY74Zs9ms4cOHe3lEpZxbm9lsNq9WKO84o4vjZ+PoMW69ZuPoMS6vAwDUPcIZAAAAH2a1WnX//fdr7NixslgskqTobtHq8EgHhTUNq/b+QuJD1OaeNpo/f74aN24sSTp69KhuuOEGXX311ZXeWQkgsNjDmUBraSbR1syTTCaTxowZox07duitt95S69atK90+EAKLkpIS/frrr5o7d65GjRpF1YGP+frrr7Vv3z5JUv/+/R
3XM77AF1ubNTG7F76vz81RMTfqAEC9IpwBgGoKlr7aALwvKytLQ4YM0bPPPutY16hfI7W9t61MkTW/y91gMGjkyJH6+eefXSbQXbp0qbp27apZs2ZRRQMEAcIZVEdISIjGjh2r33//XXfffXel29oDizvvvFPLli3Tnj17VFJSUk8jrR6r1aqff/5Zc+bM0b333quekZGKMYfojDPO0JgxY/TRRx+5tZ/vR42m6qCe2KtmJGnUqFFeHElZXbt2VZcupf//v/rqKx04cMDLI5LahYTKnavG5Kwsjdn7p3YWFtb5mAAApcxVbwIAsEtOTtaECRNc7p5r1aqVXnjhBQ0dOtSLIwMQaPbs2aOrr75aP/74o6TSO5dffvllvRLxiseO0aRJE82bN0/XX3+97rjjDqWlpSkrK0u33Xab5s+fr7feekvt27f32PEA+A6LxaK8vDxJgR/OMOdM7Z0aNrTJynLrdW+88YbeeOMNSaV/9l26dFHXrl115plnOv7btm1bGY3Vu2/UarUqNTVVaWlpat68uXr16uVWa77i4mLt2LFDmzdv1qZNm7Rp0yZt3brV8btQG88ePqwCm03XxMYqtJrvB+6zWCyOipSIiAgNGTLEyyNyZW9t9vjjjztam91zzz1eG09uSYnuPHBAFd1OaJBkc/rvDwUFGvbnHt3duLGesloDruUlAPgawhkAHlHTD0j+xD4R6Kl3k9v7ai9atIiABoBHfP3117ruuut0+PBhSVKDBg300UcfqW/fvnrlPc+FM3ZDhgzRZZddpvvuu0/vvPOOJOl///ufunXrpieffFJ33nlntb84A+Dbspy+XA/0cIY5ZzzP3TZJznJzc7Vx40Zt3LjRZX1kZGS5oU27du3K/bfH3ZulLBaLfv75Z23atMkRxmzbtk35+flVjrW52ayu4eE6MzxcZ4SG6dGD6TpitaqymtKD1mL962C6XjpyRDc2aKBHjh8PyN8tb0tJSVFGRoYk6ZprrlFMTEwVr6h/w4cP1+OPPy6ptLWZt8IZi82mew8c0E+FBZKkeKNRIQaDDjt1fkgwmzWlaYKams2amp6m3UVFKrLZ9Ozhw/qmVy+9++67Ou2007wyfgAIBoQzAGotGKpJqpoI1GAwaOLEiRoyZEjAhVIA6tf777+v2267TUVFRZKk0047TUuWLKnzD8YNGjTQ22+/rZEjR+q2227Tvn37lJeXp3vuuUcLFy7U7Nmz+XAOBBB7SzMp8MMZ5yCKyhnPSIyIVILZrEPFxRUGFo1MJt3XuIl2WYq0s6hIexMStGvXrjLX03l5eY4qFmcRERFlQpt9+/bprrvuKvdmqWHDhumOO+6QzWbTpk2btH37dhW60Z6pffv2SkxM1LnnnqvExERFj79TDcyuX5U8LGniXwcc1QV29scdQ0O188S/24etxXr2yGG91aaN7rjjDk2YMEHNmzevchxwz9y5cx3Lo0eP9uJIKta1a1d17dpVP/30k77++mvt379frVq1qtcx2Gw2PZKepq/yciVJsUaj3mvTVh1CQ/V9cbF+vHaIun3yqc4zm2UyGCRJi9u204tHjui9Y0dlk7R+/Xqdc845mj59uu6++25u1AGAOkA4A6BWgqWaJDU11e2JQC+//PL6GxiAgFFSUqKpU6fqqaeecqzr27evPvroIzVo0KDexnHllVfqxx9/1OTJk/Xaa69JKu2Z3r17d02bNk333nuvzGYuIQF/F0zhjDPCGc8wGQya2jShwsBCkv6V0Ez9nKoauvyyQ/n5+frll1/0888/66effnL8d+fOnWU+T+Tn52vz5s3avHlzleOxv9b+71ZFOnbsqMTEREcYc+6556phw4Yu2+wo59+4fjExmtmipZ44dFAHi4sd6+1VB/1iYrQtP1+zj2bofzk5sqk0FHzqqaf0/PPP66abbtL999/PTQ61lJeXp08++URS6U0lAwYM8O6AKjFixAg9+uijkqRFixZp4sSJ9Xr8548c1mcngu
kwg0GvtGylzmFhkqTzoqPV8LLL1PHL/8nkFGCGG416oGlTXREdranpadprsSg/P18TJ05UcnKy3nnnHXXo0KFe3wcABDo+WQOosUCuJjm1r/ZGN/tqbxw9RgmxsY7HXX7Z4dFxAQhMOTk5+vvf/65PP/3Use6OO+7QCy+8oJCQkHofT2xsrF599VWNHDlSt956q3bu3KmCggI98MADWrhwod5++22dddZZ9T4uAJ5DOIPaciewOFVERIR69OihHj16uKzPz8/Xr7/+Wm5oU1JSUqPxnXbaaS4VMT169FB8fHyN9iWVvt+/RUdrU36eDhdb1cRsUmJEpKPqoHtEhF5s2Uq7iwr19tGjWpKfr6KiIhUVFemtt97SrFmzNHToUD344IM677zzajyOYLZkyRLl5pZWggwbNkyhoaFeHlHFhg8f7ghnFi5cWK/hzIsvvqhZR49KkoySZjRvocRqtHQ8NzJSye3a670BV+qll16SJK1bt05nn322ZsyYoX/84x9U0QCAh3A2BVBj1akm8Xfu9tX+taCg3LAKACqyd+9eXXrppY5gxmg06qWXXtKrr77qlWDGWe/evbV9+3ZNmjRJhhNfPm3cuFGJiYmaNm2ao/Wa1WrVmjVrNG/ePK1Zs0ZWa0XTzgLwFYEezlT0JTxzznhWv5gYfdmho95t3VozmrfQu61bK6VDx3KDmcpERETonHPO0ejRo/Xf//5XH3/8sT41mrSpYyclt22nGc1bqG9UtFv7uqNhI33XqbM+MRj1yOYtuv/++9WnT59aBTN2JoNB50dGaXBsrM6PjHIEM87ah4bp382aa/fu3XrggQcUe+LGLfvk8Oeff7769Omj5cuX87mhmvyhpZldly5dHDeyrF+/Xnv37q2X454aBD2ckKArajAvT6TRqBdffFGrV69Wu3btJJXOGzV+/Hj179+/3t4PAAQ6whkAbttxRhfHz/rOp2nmkCFuvW7j6DEur/VHnULD5E48M+vYUd2wb69+OzHpIgBU5ttvv9X555+vbdu2SSr9gvSLL77QXXfd5eWRnRQZGalnn31W33zzjbp0KT2HWywWPfroozrvvPM0Y8YMtWvXTn369NHo0aPVp08ftWvXTsnJyV4eOYDKBHo4Ex0d7QiVnVE543nuBBY1FWY06ozwcA2OjdUNDd1r8XlBVKSifaBqv0WLFnrqqae0d+9ePfnkk2rWrJnjuTVr1mjgwIHq0aOH5s6dq2KnyiOU7+jRo/riiy8klf7ZXnbZZV4eUdVGjBjhWF60aFGdH2/16tW64YYbHKHfHY0a6fr42rXGvfzyy7V9+3b94x//cKz73//+p27dumn27NkEjABQS4QzANxWYrPp29xc/fOvv3T5zj/0qZutvtytOvFVRTab7kv7S+7eB745P1/D9uzRU4cOukxACwDO5s6dq8svv1wHDx6UVNoHf/369erfv7+XR1a+Cy+8UJs3b9bUqVMdrSq3b9+uBx54oEwVpX3eMQIawHcFejhjNBodFQvOCGf8V2JEpBLMZlUU/RgkNTOblRjhW9VRcXFxevDBB7V79269+eab6ty5s+O5bdu2acyYMercubNefvll5eXleXGkvm3x4sWyWCySpOuvv94v2mYPHz7csbxw4cI6Pda2bdt07bXXOqqah8XF6a5GjT2y75iYGL3++utasWKFWrVqJUnKzs7W2LFjNXjwYB04cMAjxwGAYEQ4A6BKf/31l5544gkN3L1Lt+zfp6XZWSpy8w6ZBB/8gFQdNptNj6Wna8OJD0pRRqMan/JBoJnZrBdatNSbrVqp7YkWRFZJ7x07pjPOOEPz58/njiIADiUlJXrkkUc0ZswYFZ6YhPXyyy/Xhg0bHJUpvio8PFz//e9/9f333+vss8+ucDv7OW/ixIm0OAN8VKCHM1L574twxn+ZDAZNbZogSWUCGvvjKU0TPFq540nh4eG67bbbtGPHDi1atMhl3pk9e/bo7rvvVtu2bfXvf/9bGR
kZjudoHVrKn1qa2Z1++unq3r27JGnDhg3as2dPnRxnz549GjhwoOPGwKuuukqPJjQrt3qwNvr3768ff/xRt9xyi2PdF198oW7dumnOnDl85gWAGiCcAVCu4uJiffbZZ7rmmmvUpk0bPfTQQ9p34k4lSTJFm9ToykZqNrpZJXuRjrcy6/oHzBox5eSPP3njaIY+ySr98iLUYNBbrVprdcdO5fbVvjQqWp+2a697GjdW2IkL4bS0NI0aNUpXXHGFfvnlF2++FSBo+dKXGrm5uRoxYoT+85//ONbddtttWrFihRo1auS1cVVXjx499Oyzz1a6jX3esYceekhbt2513MkJwDcEYzhjMBgUHh7updHAE/rFxGhmi5Zqanb9TJFgNmtmi5bVnuvGG0wmk4YNG6YNGzZo1apVGjBggOO5I0eO6F//+pfatm2riRMn6o033qB1qEorcteuXStJ6ty5s84991wvj8h9dd3aLCMjQwMGDFBaWpqk0krnBQsWyFxHIWVcXJxmz56tzz//XM2bN5ckZWZm6sYbb9R1112n9PT0OjkuAAQq//qWFECd++OPP/T222/r3XffdVzgOYvqGqWGvRsqpkeMjCGl+W5IwxClfZim4mNleyUX7CnQgbcPqOXYljIYffMutop8nnVcLx454nj8ZPPmOiciQpJ0fmT5d12GGo0a16ixroqJ1fRDh7Q6N0eStGrVKp199tm677779PDDD3PXJlBPkpOTNWHCBJe2W61atdILL7ygoUOH1umxrVarUlNTlZaWpubNm6tDhw667rrrtHnzZkmlLXeeffZZTZgwweN3NtaFU+cM2+pm28annnpKTz31lMySOoWF6aLRo9WjRw+dc8456t69e7lthwDUvWAMZyIjI/3ifIvK9YuJ0d+io7UpP0+Hi61qYjYpMSLSZytmKmIwGNSnTx/16dNH27Zt09NPP60FCxbIarUqNzdXL7zwQrmvs7cOXbRoUZ1fy/iKBQsWOKoyRo8e7Ve/x8OHD9dDDz0kqbS12f333++xfefl5emqq67Sr7/+Kqm0UmfJkiWKjKz7zhWDBw/Wjz/+qAkTJuiDDz6QJH366adKTU3Vq6++qpEjR9b5GAAgEBDOAFBBQYGSk5M1a9YsrV69uszzLVu21C233KIFsQsU2iS0zPNxPeMUe26scn/NVfHxYpnjzLJZbdo7c69sxTZlfpMpU5RJzUZ7vrS6rmzMy9NDTnf93Ne4iQbEuP8FYqvQUL3SqpX+mPG07rnnHu3Zs0cWi0VPPvmk5s6dq5kzZ+raa6/1mz8PwB8lJycrKSmpTIuF+vhSo7xQyGg0qqSkRFJp7+758+dr0KBBdXL8+lDd+cSKJf1SWKhf3nlH77zzjmN9x44d1aNHD0dg06NHDzVrVvW/F1arVWvXrtW6desUFRWlPn36+EX/ecBXBGM4Ewg3x3iqCv0Hj+zFe0wGQ4U3S/mj7t2768MPP9R//vMfPffcc5o1a5YKCgrK3dZms8lgMGjixIkaMmRIUPzb59zSbNSoUV4cSfV17txZPXr00JYtW/T9999r9+7dat++fa33W1xcrJEjR+rbb7+VJDVr1kzLly9X48aemWfGHQ0bNtScOXM0dOhQjRs3TocOHdLRo0d1/fXXa/HixXrllVfUpEmTehuPrzj1Bq1evXoFxe8pgJohnAGC2Pbt2zVr1ix98MEHOnbsmMtzZrNZV199tcaOHasrr7xSJpNJH7/3cYX7MhgNiu4S7bKu9R2ttfflvZJNykjJkCnapKZDmtbJe/Gk3377TXcf2C/LiS90h8fF65aGDWu0r6uvvlp9+/bV9OnT9fTTT6uoqEh79+7V0KFDNXDgQL300kvq2LGjJ4cPQKUfiiZMmFBu72v7uptuuknbtm1TgwYNFBsbq7i4uHJ/qtsCp6JQyB7MNG3aVKtWrVLXrl1r+O58g31i5kPFxaqow3hDk0k3N2ig3wqLtKOwQLuLilRyyjY7d+7Uzp
07XVp9NG3atExg06lTJxmNpRWbp4Zfzz33XL1VRAGBIhjDmfq4m7yu/bB7r7eHgDrUvn17vfTSS+rbt6+uu+66Creztw5NTU3V5ZdfXn8D9IJff/1VmzZtkiQlJibq9NNP9/KIqm/EiBHasmWLJOmjjz7SAw88UKv92Ww2jRs3Tp9//rkkKTY2VsuXL1e7du1qO9Qaue6663TppZfqzjvv1EcffSSp9H2uWbNGr7/+uoYOHRo0gYU3q/YB+CfCGSDAVHXRk5WVpfnz52vWrFn6/vvvy7y+c+fOGjt2rG688UY1a1b5fDJViU2MVctbWurA7AOSpEMfH5Ip2iTdVKvd1qkjR45o8ODBOn7iS9RLIqP0cEJCrSpcIiMj9e9//1s33HCD7r77bq1cuVJS6eSJXbt21eTJk/Xggw8q4kTLNAC1t27dOpcPReXJycnRtGnTqtxXaGhopeGN83MxMTG6++67K50Q1Ww264wzzqj2e/I19omZJ/51QAbJJaCxnzEfTWjm0v8/v6RExR/M0ZYtW7R161Zt2bJF27dvV35+vsu+Dx06pBUrVmjFihWOdVFRUY42aMuXLy8znmBs8wLUhnM4E6jtBQOxcgaB6dTWoX+42Tp04+gxSnD6/e3yyw6PjssXzJs3z7E8evRoL46k5oYPH64pU6ZIKm1tVttw5rHHHtPs2bMllV6nfvLJJ+revXutx1kbTZo00cKFC7Vw4UKNHz9eGRkZOnz4sIYNG6ZevXpp586d+uuvvxzbB2Jg4c2qfQD+i3AGCCAV3aUxc+ZMNW/eXLNmzdKCBQuUl5fn8rrw8HANHz5cY8eOVa9evTzaaqtBrway5liVvqC0RVjaB2maN3CeT5ajFxQUaMiQIfrjjz8kSaeFhun5Fi0U4qE/j9NOO03Lly/X4sWLNXHiRB04cECFhYV6/PHHNWfOHL300kt+3eII8Cb7lxolNptScrL19KFDHtt3UVGRjhw5oiNOc1DVxl9//RUwd7raJ2Z+4tBBHSw+Oe9YgtmsKU0TykzMHGE0qsv55+v88893rLNarfrtt99cApstW7YoIyPD5bW5ubn65ptvKhyL/YPw3XffHTRtXoDasIczUVFRMpsD82Mh4Qz8lbutQ6vbYtTf2Gw2RzhjMBj8dh6Tjh07KjExUZs2bdKmTZu0c+fOGndPeP311x03FxkMBs2ZM0d9+vTx5HBrZcSIEerdu7f+8Y9/6NNPP5UkpaamltmuvgKL+qrYqapqvz5aEXqjOslbFVHBUomF4BCYV+FAEKroLo39+/crKSmp3Nece+65Gjt2rEaNGqX4+Pg6G1vjgY1VnFusI58fkWzSjTfeqLi4OJ8KIkpKSnTzzTc7vvhrYjLrtVatFO3hf+ANBoOSkpI0YMAATZs2Tc8//7yKi4u1a9cuDR48WNdee61mzpyptm3bevS4QKArttn0RXaW3sjI0K6iIrdfd2/jxmoREqrsEqtyrCUKu22sjh8/ruPHjysrK8ux7Pxjb09WG853uvr7Xa61nZjZZDKpS5cu6tKli+OOWJvNpgMHDpQJbPbs2VPl/v766y81btxY5513nrp3766zzz5b3bt31xlnnKHQ0LLzpgHByh7OeLKlWbuCuVVv5IY9HtkL4Qz8lzutQ6MMBiVG+H+rvsps3rxZv/32mySpd+/eatmypZdHVHMjRoxwtGf76KOPNHny5Grv45NPPtGdd97pePz8889rxIgRHhujpyQkJOjjjz/WnDlzdPPNN1faZvj2229XgwYNlJCQoCZNmqhhw4Ye+5Ldky3GioqKlJ6err/++ktpaWll/vvbb79VWrVvb0XYq1cv9ejRQ61bt1br1q3VqlUrtW7dWi1btlRYWJhPvFdfPqY3jwvUFcIZIABUdpfGqeLi4jRmzBjdeuutOvfcc+thdKUShiXImmPVsTXHVFxcrKSkJK1cuVKXXnppvY2hMg8//LAWLFggqbQN2WtNmqp5SEidHS86OlpPP/
20brrpJt15551au3atpNIL7hUrVuiRRx7RfffdxxeJQBUsFos++OADPbZ7l/ZaLC7PhUiylP8yGVRa3XFLw0YuIUKXJ56o9Hg2m015eXmOoGbbgIHKLrEq21qi3JIS/VSQrwVOrYIqEmh3unp6YmaDwaBWrVop+4p+6iQpSZLCwrUooZn+dTC9ytdnZmYqJSVFKSkpjnUhISHq0qWLunfv7vg5++yz1bRp9eZC4049BIq6CGd8TSDOOeMtI6bU/quDHzwwjmBRWetQu1ybTYuPH9eIOrzJztvmzj0Z+PprSzO74cOH68EHH5RU2tqsuuHMV199pVGjRjluEnrggQc0YcIEj4/TUwwGg9q0aVPldxQZGRn629/+5vK6Bg0aqEmTJmrcuLEaN25c7rLzuujo6DLdP9xtMVZe6FJeAOOpCvr169dr/fr15T6XkJDgCGtODW9at26tFi1aKKSc7yi80U7NWy3cvN06js8BqAuEM4Cfy8zM1CuvvFLl3AqSNGXKFD388MNe+WBqMBjU4sYWsuZZlfVdlvLz83XVVVdp7dq1Xu+PO3v2bE2fPl2SZDQaNX/+fE09OtUj+67qQ2jXrl21evVqzZ07V/fdd58OHjyo/Px8TZ06Ve+9955eeeUV9e3bVxIXAoCzwsJCvfPOO3ryySf1559/ujwX2TlSTa5popKCEu17ZV+5r7dJChnXXKN6un7Aqep31mAwKCoqSlFRUWrRooV0ylxR19nitCY3t8I7Xe2hUKDf6VpX2oS6F5pHG43KOaXCyWKxaPv27dq+fbvmzJnjWN+sWTOXCpvu3bvr9NNPr/DDL3fqIRAUFxcrNzdXUmCHM6dWhlM5U3M/7N7r7SEEnYpah8Yajco68W/cvw+mq2VIiC4JwL/bVqtV8+fPl1R6g8WwYcO8PKLaad++vc477zx9//332rJli37//Xd17tzZrdf+9NNPuvrqq1VQUCBJuuGGGxyfX33JqXMnbXRz7iRnNptNR48e1dGjR/Xrr7+69ZqwsDCX8KZhw4ZatmxZpRU7119/vWJjY8u00a0pkyRrLfdx8OBBHTx40FFhdSqDwaDmzZu7BDYtW7bU9OnTK22ndvfdd6tHjx6yWCwqLCxUYWGhCgoKarRcUFCggoICff7555X++Y4ePVoXXXSRzGazzGazTCZTmZ/qrjcajXr++ecrPe6dd96pCy64QI0aNVJ4eHht/neUwecA1BXCGcDPZGdna26XM/VdXp425OVpR2GB3G2w0/Dd9/Rn8seOx/XdSsdgNKjV7a3UKr6VVq5cqePHj+vKK6/UV199pU6dOtXrWOy+/PJLjRs3zvF45syZuvrqq3X1Y3+vtzEYDAaNGTNGV111lf71r3/p5ZdfVklJiX799VddccUVGjlypPr27atp06ZxIYCgl5+fr7feektPP/20Dhw44PJcVJcoNbmmiaLOiDp599xdUtqHaSo+dvJLjZCGIWo2upnietb+C8ny7iQO3dhctpfdD4W4k9h9VbV5sYdfK9t30NESq34tLNSvBYVK63eFtm/frl9++UVWq+tH5/T0dKWnp2vFihWOdaGhoTrzzDNdQpt9+/bplltuYZJXBIQspy/MAjmcoa2Zf/NEtY7k3//OVtQ69JnDh/TesWOySpr41wHNbdNWnWvREskXrVu3zjGB/MCBA9WwYUMvj6j2RowYoe+//15SaWuzqVOrviFw//79GjBggDIzMyVJ/fv31+zZs2U0GutyqB7hbqX4ldExijAadNRq1TGrVbktWujw4cPKzs526/WFhYU6cOBAmc8GlbFYLG4FM6GhoWrevLlatGjh+G+LFi1ke/FFNTWHqInZpKbmEEUbDOq3e1eV16gftG6jw1ar0ostSrcUq+jvY7R//37t27dP+/btU1paWoUtlG02m6Oy57vvvnPrfdpf06FDB7e295TCwkKtWbOmXo8plV7Xt2rVSlJpaBcfH+/4adCggcvjin4aNGiguLg4lz
Zz3q7Y8QZuDq4/hDOAj8vLy9M333yj1atXa/Xq1fruu+/KfLHkLl9oo2M0G5WcnKwrrrhC3377rQ4ePKh+/frp66+/Lr0LvR79+OOPGjZsmIpP3Ik2YcIE3X333fU6BmdxcXF64YUX9H//938aP368o9x5wYIFjpZrzgL5QgA4VU5Ojl577TU9++yzOnjwoMtzAwYM0G+Jvymyc9lqlLiecYo9N1a5v+aq+HixzHFmRZ0eJYPRvflQaiKuZ1ydh0LBqrI2L/b/o1OaJshsNKqp0aim5hD1iopWlxNtUQoKCvTzzz9r+/bt2rZtm+Pn6NGjLscpKirS1q1btXXr1irHZL8rccKECQE3ySsC13Gn9ouEM/BV3qrW8bVQqLzWofc3aar9Fov+l5Oj3JISjdu/T/PbtlOXCvbhj+bNm+dY9veWZnbDhw/XP//5T0mlrc2qCmeOHTumAQMGOG7QS0xM1KJFi8qt7vVF7t5U80yLFq5thk/cRFpYWKiMjAwdPnxYR44c0ZEjR8pdPvD11zpabNUxa7GKyzlORRqYTGobEqqmZrNOu/kmR/DiHMY0bNiwTMs0Sdrx3vtl1rlzjdoiNFQtJHVXafV9l+eec9lHcXGx0tLSHGHNvn37XMKb3Zs3K8NqrXAuKpxUWFjoqEaqiYiICMXHxysuLk67du2qtDpp4sSJdfo5oL5RJVS/CGeAOlSTL1IKCwu1YcMGrVq1SqtXr9a3336rokomtw5rFaaoM6J0/NvjsuZUHNqENAzRjMdjXb6Q9NZdZFFRUVq6dKl69+6tH3/8UXv27FH//v21bt26ersjKi0tTYMHD3bcOXrNNdfo2WefrZdjV+Wcc87RV199pXfffVcPPPBAhXf0BOqFAODs+PHjevnll/X888+X+V0YMmSIHn74YfXs2VNnvXdWhfswGA2K7hJd10N14Y1QKFhU1OYlwWzWlKYJ6hcTU+Frw8PDde6557rMuWa/o3Dbtm0uoc2vv/5a4Z2Lp7LZbNq/f7+aNm2qtm3bqnnz5o6fZs2alVmubpsFPiDB04I1nGHOGbjDH1q4mQwGPdW8hW7au1c/FRYorbhYdx3Yr2/z8gLi73lhYaEWLVokqfSz49VXX+3lEXlG27ZtdcEFF2jDhg2Oa43TTz+93G0LCgo0ZMgQ/fTTT5Kkjh07aunSpYqp5DrHWWUhY5jMekTSzZPMKqykGVdtvy9w96YaUznhh1Ra+WAPTCpjb6dms9mUW1Ki1Tk5ejA9rcrxPd+ihSP47PLSS1VuX5XaXKPamc1mR8uy8uw4o4uKbDYdLrYozVKsr3Nz9MYpNxmV57yICDUPCVGowaBQg0HNb71VYWFhCg8PV1hYWJXLf40Zc+K1Rv1cUKD70/6q8phvtGylcyMjZLVJJZI6fJUqq9Va7k9xcXGZdTtHXq8SScU2m0ok/VyQr+fdmPunR3i4zAaDskpKlG21Kicy0qVi2F35+fnKz89XWlrlf5dsNpv27dun1NRUXX755dU+jq8JxiohbyOcAeqIu1+kWCwWbdy4UatXr9aqVav0zTffKD8/v8L9nnHGGTrU4pCizohS1BlRMseW/hpHnRGlfRW00ZGkZqOb+dSXgg0bNtTKlSt1ySWXaPfu3frpp580aNAgffnll4qOrtsvUXNzc3XNNddo797SD16JiYmaO3euT4UbRqNRt9xyixo3bqwhQ4ZUuF2gXQgAdkePHtXMmTP14osvunyJaDAYlJSUpIcfflhnn322F0dYNW+EQsHC3ubl++Ji/XjtEHX75FOdZzZX+OG+MgaDQS1btlTLli01aNAgSaUffAs6dtLOoiL9Ulig5VnZ+jovt8p92Xukb9mypdLt4uPjKwxunB/Hx8fr448/5gMSPC5YwxkqZxBIIo1GvdqqlUb+uUfpxcX6oaBAN9xwgz766CO/aHlVEavVqmeeeUbHjh2TVHoTXSAETnYjRozQhg0bJJW2Nnv44YfLbGO1WjVmzBilpq
ZKkpo0aaIVK1YoISHB7eNUFjJajOFaFi+t/3OfQkoKqvcGqskTgYW7DAaDok0mDYqN1XNHDntlDsiKWhHW5Bq1IqEGg1qGhKplSKh6RETok6ysKt/r263buFYnPfNMtY65I/zkPJutQ0I043DVFVEXR0W5HLM6f38lKf6U3/sLIyM1NzOzyuO+36ZtmUosq9WqrKwsZWZmuvwcO3aszLr9yR8rq8SqLGuJskusOlpsVZEbtUrfjBqlhLh4xzH9kdVq1YQJE4KqSsgXEM4AdaCqpPnJJ5+UwWDQ6tWrlZqaqpycnAr31aFDB/3tb39Tnz59dPnll6tFixbl3iHuj210mjdvrpSUFF166aVKT0/Xhg0bNHToUC1ZssSlv6cn2S90N27cKElq06aNlixZ4jMf1k+dRPE3N+/w2Dh6jBJiYx2P/fViAPXHV1skHTp0SM8995xeeeUVl3Oj0WjU6NGjNXXqVHXpEkhNO1BTJoNB50VHq+Fll6njl/+TqbDQo/sPNxrVNTxcXcPD1TokxK1wJt5oVHZJSZUTwto//O3YUfm5OiwsTMXFxXxAgscRzgCBoYnZrNdattLf9+1VbkmJkpOTNXnyZD399NPeHlqNlHeDY0pKipKTkwPmRoSkpCTdd999kkpbm50azthsNt1zzz1KTk6WVHreWrZsmTp27FjvY/WU+ggsnNW2YscTxz+1FWFd8cZ79dafb22OazKZ1KBBAzVo0KDK4+z4zvWz5nd5ubp5X8U3Qts9eeiQDhYXa3R81cfwVZ9++qnL+fdU3BxcNwhnAA+rKmmWpAcffLDC17du3Vp9+vRxBDJt2rRx+9j+2EanY8eOWrFihXr37q3MzEylpKTohhtu0Lx58+rki6b7779fn376qSQpNjZWS5cuVfPmzT1+HE9xd54gX5hPCP7DWy2SKguE/vrrL82YMUNvvPGGS/Wg2WzWjTfeqClTpqhTp051NjZUzdd68Ncnd3ump3Qo/eIk02rV4eJiRbz+mtLT05WWlub4cX6cl5dX6XELqwic+ICEmnIOZ+Lj4703kDoWHR0tg8HguAYPpLvvAbvTw8P1XIsWGr9/v6ySZsyYoc6dO+u2227z9tCqpaIbHDMyMgKqUrRNmza66KKLtH79ev3www/asWOHy41HTzzxhF599VVJpdfBixcvVs+ePb01XI+pz8BCqt+KHW/zxnv11p+vN45b1ecAu+ySEr145IhmHz2q8Q88oHvvvdenv2uSSm8Ottls+i4/TwsyM7UyO9ut13FzsGcRzgAedPjwYX344YeVJs2natasmfr06eMIZDp06FDuhHPu8sc2OmeffbY+//xz9evXT/n5+froo48UHx+vN954o1Z/Fqd6+eWXNXPmTEmlF7qLFi1St27dPLb/uuDOhUCs0VgnJdkITN7qIVtRIPTwww9r+/btmj17tssX0aGhobr11lv14IMPqm3bth4fD6rPH3rw15Xq3qnXyGxWI7NZXQYMqHCfNptNOTk5LsHNtnsm6LC1WIeLi3WkuFi7CouUbq16alvnD0h8OII7gqVyxmg0KjY21vF+qZxBoOoVFa2HmiZo2qHSia/vuOMOtWvXTv369fPyyNwTbK10RowYofXr10sqbW32r3/9S5L0zjvvuFTSvP3227ryyiu9MsZAUN8VO97kjffqrT9fX6vEskk6NzxcWwsKVCIpt6REM2bM0AsvvKCbb75Z//znP33yJsNjx47p/aNHteB4pnZXMs91ebg52LMIZxAUPN2+JyMjQz/99JN++uknfTV1qv4oLNIfRYU6aq2qkclJ18XG6paGjdQhNFSGzVvUZe7cGo8nEFxyySVKTk7W1VdfreLiYr311ltq1KiRpk+f7pH9f/7555owYYLj8WuvveYXH1YquxCwyyop0TtHj2pso0b1PTz4GW998K0oENq/f7/GjRvnsi48PFz/+Mc/9M9//lMtW7b02BiA2vL0nXoGg0ExMTGKiYnRaaedJk
nq/tjjLtu420aBD0iormAJZyS5hDO7d++W1WoNiC93gVNd36CBsseM1vPPPy+r1aqkpCR9/fXXPn8zmiSlpqYGVSudpKQk3XvvvZJKA5nOnTtrz549LsHMU089pRtuuMFbQ6wRX6yyru+KHW/yxnv11p+vL1Zi/VlUpHeOHtXHWcdlsdlUVFSkN998U7NmzdLw4cM1efJknXPOOfU25vLYbDZ9//33eu211zR//nwVFLjOOWWMMUoWqaSgpMJ9hDQM0YzHY1069PhjZwRfQjiDgFeb9j1Hjx7Vzz//7Ahi7D8HDx6s9biGxMWpYx3Nq+KvBgwYoDlz5mj06NGy2Wx68skn1ahRI91///212u/mzZs1cuRIlZSU/gMzefJkjR071hND9rjyL2gbqNVGY5n5hIzhRsc/ms8dOaw5l0hNr20qg8HAP44ol7sffBs3bqzo6GiFhIQoNDRUoaGhjuVT/1vVcyaTSTNnziw3EHIWGRmpu+66S5MmTar2ZJEIXFV9yA+TWY9IunmSWYWVzPTiqXNifd+pR/Uk6kqwhDPJyclKT093PH7sscc0a9asOm/jCXjLjBkztGvXLn366afKysrSVVddpQ0bNvjctdWp82x+53ROqkygVIq2atVKZ5xxhn755Rft2bNHo0ePdnl+woQJ+uc//+ml0dWct6qsPREK8fnZ93gr7Kv4uA3UuCReEadMI/CW0aC3JJV+xR6pDpkJGvLXEL322mvKzs5WSUmJFixYoAULFmjgwIGaPHmyevXq5dEuMVXJycnR3Llz9frrr2vLli1lnu/d1qQ7eobqui5mff5bsZIWlrYZL69bwPwrzBr6Z9U3j8F9hDMIaO6278nMzCwTwPz0008uH+aqYoo1KbxluEKbh+r4t8dVkkfSXBPXX3+9jh07pvHjx0uS/vnPf6phw4a65ZZbarS/ffv26aqrrnL09R85cqT++9//emy8nlbhBW0jyXpXhFL3WpWWbVPzGIN6tTHp6a+LNHVVaSuow58e1o2HjmtGP0I/lDr1g+/GrCy3XmefrLw+zZ07V0OGDKnXY8L3VfUh32IM17J4af2f+xRSUlDptp7iK5O82mWVlOjdo0d1K9WTqIZjx445ln///XddeumlAVdN4q02noA3mUwmffjhh+rdu7c2bdqkP//8U9dcc41Wr17ts3MupVsseufoUbe2DZRK0eTkZP3yyy8VPn/ppZfW6xe3/s4boZAvVgkFGm+FfVUeN/zEjyT9WcG566mnNGXKFL366quaOXOmDh8+LEn64osv9MUXX+jiiy/W5MmTNXjwYBmNRo+N/VQ//PCDXn/9dc2ZM0fZp8wnExcXp5tOz9e4niHq0uTkuXVolxAtGiFNWF6g/Vknr6FaxRo0c0C4hnYJqbPxBivCGQSsqtr3SNLo0aPVsGFDpaWlub3fJk2aqGvXro6fGXtnKKxlmMwxJ3+dos+M1r6XK06Sm41u5hLMwNUdd9yhjIwMPfLII5Kk2267TfHx8dX+AJ2VlaXBgwc7/v9efPHFevfdd+v0H7+6ZDIadHk719P2lF5higyRJq4oDWieXV+kPItNLz9a4rfvE3WnsKTi0NhZY5NJJoNBFptNxTabisPDZbFYZLFY6mxsv98xXjsenOx47M93QwKeVFEbhSiDQbknrmeePXJYOSUluqdxY28NE34kOTlZH374oePxbbfdpscffzygqkmCbf4KwFlUVJQ+++wzXXDBBdq/f7++++473XjjjVq4cKHPfT74Mjtbj6Sn6XgV16gGlbYPCoRKUfv5qSIGg0GTJk3Sddddx/nJh/lzlZBU/VCoy/V/eeS4wSQ+Pl5Tp07VxIkT9c4772jGjBn6888/JUnffPONrrnmGnXr1k0PPvigRo4cqZAQz4QeBQUFWrx4sV577TV9/fXXZZ4/77zzNG7cOF1//fWKfLp5ufsY2iVEQ043l7k52MT3mHWCcAb1ytNzv5zKZrMpIyNDO3fu1JIlSy
pt3yNJhYWFFQYzjRo1cglh7D9NmjRx2e7V914t89q4nnHSXSrThiqkYYiajW5W+jwq9dBDDykjI0MzZ85USUmJRo0apWXLlqlv375uvd5isWjEiBH64YfSy46OHTvqk08+UXh4eBWv9D8TLgxTVKhBty8pkE3SaxstyrvlFs2aNUtmM6f5YOZ88Z75bab2z666GjCkYYgSnjnNtbLvptLfI5vN5ghpioqKHP91Xv7t6mtksdkcPz8W5Ov5I0eqPG6g3AmJwOBrH0Araqc2++hRzTxSeifeG0czlFtSovdKCOf9SV1fG58qWKpJgm3+Cvg/T3/h2qJFCy1dulSXXHKJcnJytHjxYk2ZMkVPPfWUR45TW/klJXrq0CEtPJ7pWBdvNCqzpKTcCbclaUrThICYyJ3zE2rDW6EQai4yMlJ33nmnbr/9di1YsEBPPvmkfvrpJ0nSjz/+qBtuuEGPPPKI7r//ft1yyy2KiIio0XH++OMPvfnmm3r77beVkZFRZgyjR4/WuHHjlJiY6Nb+yrs5GHWDP2XUm9rM/eKspKRE+/fv186dO11+fvrsM+21WJTj5p3hdhEGg84MD1en0DB1CgtT3/nzHCFMbUqJ43rGKfbcWOWe0o+Sihn3GAwGPfvsszp27Jjee+89FRUV6dprr9WqVat03nnnVfpam82mu+66SytWrJAkNWjQQEuXLi0TrFWmXcHcWo3fbo9H9lK1seeGKsJs0E2f5Mtqk9577z3l5+frgw8+8NgdGPBPJcUlOrjgoDJSMqreWJVX9hkMBsdcMlFR5bd1Gv14tMtjW0mszPdlugTVp6LVI1CqqlDIdUrn43peUsfvwnX3F6Ut3T7IPKaQsWP11ltvcbetH/DUtbGd1WpVZmamjh49qmPHjunYsWMuyxkZGXrjjTcCspqkpm08neevkKjahPfUxReuZ599thYuXKirrrpKJSUlevrpp9WpUyfddtttHj9WdWzbtk3D/9yjXUVFjnX9o2P0eLNm+i4vr9IJt/0R5ycAkhQSEqK///3vGj16tJYuXarp06dr/fr1kqQ9e/borrvu0uOPP66JEydq/Pjxio+Pl1T5jTzFxcVasmSJXnvtNaWkpJQ55plnnqk77rhDN9xwQ0DPMejvCGdQL6p7l15hYaF2795dJoDZuXOndu3apSKnC7naerVlK13g9CVjlz59PLZvg9Gg6C7RVW+IchmNRs2aNUuZmZn69NNPlZOTo4EDB2rdunU688wzK3zdM888ozfffFOSFBoaqk8++USnn356fQ3ba8acHaKIEOn6RfmylEgLFy5Ufn6+Fi5cGJAVQ6ia5ZhF+17Zp7w/8hzr4i+NV3S3aKUvSK+Xyj6D0aDmY5rT6hGoI3edH6qYUOmWzwpUYpPeeecd5eTk6IMPPlBoaKi3h4cKVHVt/Morr+iCCy6oMGhxXrY/znLzC7+KBNLd2u5WY1K1iUA3cOBAvfTSS7rzzjsllbaPbteunfr161fvY7HZbHrxxRf1wAMPOD7PR5yYW21oXJwMBkOFlaKBUDFjx/kJCG5Go1FXX321rrrqKqWmpurJJ5/UF198IUk6fPiwHnroIT355JO64447dNppp+mxxx4rcyPPv/71Lx04cEBvvfWW/vrL9eau0NBQJSUlady4ccxf5ScIZ1Dn3Jn75ZZbbtHSpUsdgcy+ffvK3b5SBimkcYhCm4YqtEmoQhqHKGN5hqw51gpfEtIwRM9M425tX2Y2mzV//nwNHDhQa9asUUZGhvr376+vv/5abdu2LbP9okWL9MADDzgev/3227rsssvqc8heNbRLiD65XhqWbFNBQYGWLFmia665Rh9//HGFlQ4ITGvXrtXOR3eqOKs0gDGYDWr+9+Zq0LuBDAaD4s6Pq7fKPlo9AnXrpnNCFR1q0KjFpeH8Rx99pJycHC1atMhnJ4AOZu5cG48fP76+h+XgfLe2v96pnRgRqQSzWYeKi1XeJ4pAmr8CqMr48eP1++
+/a+bMmbJarUpKStI333yjrl271tsYDh48qP/7v/9zfAEpSV3CwvRMixZqHxrmsq3JYND5kYH7uYXzEwCptCvFZZddpssuu0xbt27VU089pYULF6qkpETZ2dl6+umny33d/v37dfvtt5dZ36FDB/3jH//Q//3f/1Wrawy8j3AGda6qnqqSdPz4cb399ttV7isiIkIdOnRQx44dXX46deqka9deK4PZ9YvFsGZh3K0dAMLDw/Xpp5/qb3/7mzZt2qQDBw6oX79+Sk1NVePGjR0lnseOHdOkSZMcr5s2bZrGjBnjxZF7x6DOIVq69GNdc801ys3NVUpKigYMGKClS5cq1qk0HoHJZrPp2Wef1eTJk2W1lobTIY1C1PrO1orscPJDXn1X9tHqEahbw84M0WehBg1Ntik/P19ffPGFBg4cqCVLlnDu9zHuXBu7y2w2q0GDBmrQoIEaNmzosmydN0+xRpPiTCbFmow6UGTR9MOHqtxnINytbTpxN/7Evw4E/PwVgDueeeYZ7dq1S5999pmysrI0ePBgbdiwQQkJCXV+7OXLl+vmm2/WwYMHHetubtBQExs3VmgQzJFW3nxCoRuby1bB9xQ2SSHjmmtUT9fW1NxECgSuc845R/PmzdO///1vzZgxQ++8844sFkuVrzMYDLrmmmt0xx13qF+/fsw76acIZ1AnnPuquttT1S7WaFTb0FB1vfbaMiFM8+bNKzzZGL4u++GKu7UDR2xsrL744gv16tVLv/76q37//XddeOGFKioqKlPGKUk33XSTHn74YS+MtHY8NtfN3/6mlStXauDAgcrKytJXX32lK664QsuXL1fDhg09cgz4nqysLN1yyy1avHixY110t2i1+kcrmWO8/08+rR6BujWgk1nLly/RVVddpezsbK1bt059+/bV8uXL1ahRI28PL2idOt/Au05fUFbmwohInRURoViTUfFGk8589ZUyIUx0dHSF7SrO6rnG5bGtxCbzfUerNQeYP38Z2C8mRjNbtAy4+SuAmjCZTPrwww/Vu3dvbd68WX/++aeGDBmi1atX13jy6aoUFhZqypQpev755x3rEhIS9P7776v1PRPq5Jj+gu8pAJSnU6dOeuONN9S/f38lJSVVuf2CBQs0fPjwehgZ6pL3v6lBwHP37rtHExI0ICZWcScmtuoyb55Hjs/d2oGjSZMmWrlypS655BLt379fe/bsqXDbQYMGBX1vzYsvvlirVq1S//79dfToUX3//ffq06ePUlJS1LRpU28PDx72008/adiwYfr1118d65pc3URNr2vK+Q4IIpdddplWrVqlK6+8UkePHtXGjRvVu3dvpaSkqHnz5t4eXlD7y2LRE4cOalVOjlvbj2vcyKW1Txen+RlrIhjnAAuG+SsAd0VHR2vJkiW64IILtH//fm3YsEE33nijFixY4PG7rX/55ReNGjVKW7dudawbNGiQ3nnnHTVt2lT+2TDRs/ieAoDdWe+d5fI489tMt143adUkTcub5nj8w03+fFtN8CKcQZ1wLt21lcTKfJ+5yrv0Fk1vrMV1NPcLd2sHjjZt2mj58uU6++yzVVJSUu42BoNB999/v4YNGyaTyf9bc9RGYmKi1q5dqyuuuEIHDx7U9u3b1bt3b3355Zdq2bKlt4cHD1mwYIFuvfVW5ebmSpLi4uL0wQcfaErGFC+PDIA39OzZU2vXrlW/fv2Unp6un376SZdddpm+/PLLcudrQ90a/k+TMlIydPDjg7IVuTen4qkVLJJnro2D8W7tQJu/wmNV1h7ZC/xNixYt9Pnnn+vSSy91zE02depUPfnkkx7Zv81m06xZszRhwgTl5+dLksLCwjRjxgzdddddQX/z3Kn4ngJAecxxRDNL/AAAHVNJREFU7n1d7+528G00o0Ods9+lV5lAu0sPdevw4cMVBjNS6YeCffv2KTU1tR5H5bu6deumdevWqVWrVpJK72S77LLLKq08gn+wWCyaOHGirr/+ekcw07
17d23atElXXXWVl0cHwJu6deum1NRURxjzxx9/6NJLL3WprkPdW79+vXY+vlPpC9IdwYw5zqxGV1beZq4ur43jesbp9GdPV7sH26nVuFZq92A7nfbMaQEZzAAoq3v37i7VMk899ZRmzZpV6/0ePXpUw4cP1+233+4IZs4880x99913uvvuuwlmAMBNUadHydyg8uAlpGGIok4PnJtPghkRG+pFMN6lB885tVe7u/MYbRw9RgknJkHu8ktwF8+fdtppSk1NVd++fbVr1y7t2rVLvXr10v/+9z+ddtpp3h4eauCvv/7SiBEj9PXXXzvW3XTTTXr11VcVGRnpxZEB8BWdOnVyzDn266+/av/+/erVq5dSUlLUvXt3bw8voB07dkyTJ0/Wm2++eXKlQWr4t4ZKGJogU5RJkZ0jvXZtzN3aQHAbNGiQXnzxRd11112SpDvuuEPt2rXTFVdcUaP9rV27Vn//+9+1f/9+x7px48bp2WefLfe6tMv1ZecMBYBg9cPuvWXWJfcLUdLC0mtE57pre8w9/wqzhv5Zcata+A/CGdQbeqqippzb5ElSzo5Q6amqX/fKjaF6r0vpa+m8KbVr107r1q3TFVdcoV9++UX79+93tLnp1q2bt4eHali7dq1GjhypgycmlQ4NDdWLL76o22+/nbsSUWdopeOfWrVqpXXr1unKK6/U1q1bdfjwYV1++eVatmyZLrroIm8PL+DYbDZ9+OGHuu+++3To0CHH+vC24WpxUwtFdjj5JSXXxvAnnvg3YE/thwEPuvPOO/X777/rhRdeUHFxsZKSkvTNN9/ozDPPdHsfFotF06ZN03//+1/ZbKVfHzZs2FCzZ8/WtddeW0cjB4DAN7RLiBaNkCYsL9D+rJPxTKtYg2YOCNfQLiFeHB08iXAG9Yq79OAJ9hLPquYxosSzrJYtWzrmIdi+fbsOHjyoyy+/XCtWrFBiYqK3h4cq2Gw2Pffcc3rwwQdltVolSa1bt9aiRYt0/vnne3l0AHxV06ZNtXr1ag0aNEjr169XZmam+vXrp08//VR9+/b19vACxq+//qrx48dr1apVjnXR0dGKviZajfo2ksFUNnTh2hiomLduCgimmxGeffZZ7dq1S0uWLNHx48c1ePBgffvtt0pISKjytbt27dKYMWP07bffOtb16dNHc+bMYW5LAPCAoV1CNOR0s1L3WpWWbVPzGIN6tTHJxI08AYU5ZwD4HeYxqh37l3TnnXeeJCkjI0N/+9vf9M0333h5ZKhMdna2RowYofvvv98RzFxxxRXavHkzwQyAKsXHx2vlypWOMCY3N1eDBw/WkiVLvDwy/1dQUKBHH31UZ599tkswM2zYMP3yyy9q3L9xucEMAHibyWTS3Llz1aNHD0nSnj17NGTIEMecMRWZO3euzjnnHEcwYzabNX36dKWkpBDMAIAHmYwGXd7OrFFnhejydmaCmQBE5QwAv8Q8RrXTsGFDffnllxo8eLC++uorZWVlqX///vrss8/0t7/9zdvDwyl+/vlnDR061GUi74ceekiPP/64TCaTF0cG1K2q7l4OM9n0tKzqVjhbhdaKP6js8fC4/FV0dLQ+//xzjRw5Up999pkKCwt13XXXac6cORo1apS3h+eXUlJSNH78eP3xxx+Ode3atdPLL7+swYMHe3FkAOCe6OhoLVmyRBdccIEOHDigDRs26MYbb9TcuXP19ddfKy0tTc2bN1evXr2Um5uru+66S3PmzHG8vmPHjpo7dy43CwEAUAOEMwD8Fr3aayc2NlbLly/Xddddp5SUFMdd1MnJyRo4cKC3h4cTFixYoFtvvVW5ubmSpLi4OM2ZM0dXX321l0cGwNPqY06H8PBwLVq0SDfffLPmzp0rq9WqMWPGKCcnR7fddlutjx8s0tPTNWnSJM2bN8+xzmw26/7779cjjzxS7gTYAOCrWrZsqc8//1yXXnqpcnNztWjRIq1YsULZ2dmObZo2bSqDweCY81CSbrzxRr
388suKiYnxxrCrpbwJtwEA8DbCGQB+jV7ttRMVFaXPPvtMI0aM0JIlS1RQUKAhQ4Zo/vz5Gjp0qLeHF9QsFoseeOABzZw507Hu7LPP1uLFi9WpUyfvDQwIAoE+30BISIjef/99RUdH680335TNZtPtt9+u7OxsTZo0ydvD82lWq1VvvvmmpkyZouPHjzvWX3rppXr99dfVtWtXL44OAGrunHPO0YIFC3T11VfLZrO5BDOSdOjQIcdybGysXnvtNY0ePbq+hwkAQEAhnAGAIBceHq7FixdrzJgx+uijj2SxWDRixAi99957uv7667V27VqtW7dOUVFR6tOnD220PMxqtSo1NdWlZcShQ4c0YsQIffXVV47tbrjhBr3++uvcjQ3AI0wmk15//XXFxsbqmWeekSTdd999ysrK0qOPPiqDgSrUU23ZskXjxo3Td99951jXsGFDzZgxQzfffLOMxv9v7+6Do6rvPY5/NsnuQhISQoJ5uBhARJoLdlKTuWmgLeYPIVrkoSL0QpEoxMkw1CRYqXEgAWp0NIgB5FHRog0md4RWp2IFR8tjQElpx2JxBpQbSIKUBAxPbkL23D+8WV3yHLJ7Ntn3a+Y32fM7vz3f32aSb07Od89v+ThPAL1benq6wsPDdfHixTbH2Gw2VVRU8GYh+DWz3sjTm+N2NSbgLyjOAABktVq1bds2BQcHa+vWrWpqatKvfvUr/frXv9aFCxckSatWrdKQIUO0evVqj99V01rBwhtFIW/H3bFjh7Kzs3XmzBlX3+DBg9XY2Oj6p9hqtWr16tXKysriYimAHmWxWPT8888rLCxM+fn5kqTly5ervr5eL7zwAjnn/126dEkFBQVavXq1nE6nqz8jI0NFRUWKiooycXYA+gJfuWNz37597RZmJKmhoUFnzpyhOAPAo3wlLwKeRnEGACDp27XyX331VQUHB2vDhg2S5CrMNKuqqtL06dP11ltveaxA01rBwhtFIW/H3bFjh6ZPny7DMNz6//3vf7vFf+utt5SSktLj8QFA+rZAs3TpUg0YMEC5ubmSpBdffFGXLl3SunXrdPDgwT5fKG8rZkBAgP70pz/psccec/vbkJCQoA0bNmj8+PEenRMAeNq/fpDgtn2kvr5Tzzsya7aiw8Jc2wnH/9Wj88LNa+/Ctj3Q0PNq0hjHFjma2n4jxikPzAsA4I7iDADAJSAgQGvWrNEbb7yhy5cvt9hvGIYsFouys7M1efJkBQX17J+RtgoWni4KeTtuU1OTsrOzW8T7Prvdrk8++UQxMTE9FhcA2pKTk6MBAwYoMzNThmHolVdeUUlJia5du+Ya0xcL5W3FjI2NVVxcnCoqKlx9/fr1U35+vh5//HHZbDaPzAUAvGlGnvu5/OV/2aTnOn7euods2prw3XM/7emJAV3AklvoSfw8wdsozgAA3Ozfv7/VwkwzwzB05swZWa1W2e122e122Wy2Nr92dp/VatWLL77YasGiuS8zM1O1tbWudf2/P7a7j51Op5YsWdJu3Icffljl5eVqampSY2PjTbcrV660uCvpRg6HQ8ePH6c4A8Br5s2bp9DQUM2aNUtOp9OtMCP1vUJ5ezFrampUU1Pj2k5PT9e6det022239Wh8APAlIaNCFBQRpOsXrrc5xjrIqpBRIV6cFQDAE1g6zjdQnAEAP9fd5QykbwsIDoejp6fUprq6Oj366KNei9esvr7e9YHZ3sSSEQC8bfr06Vq4cKHOnz/fYl9zAWPu3Ln64IMPFBgYKIvF4vpsmu4+tlgscjqdWr9+fYeF8r/97W+uAv3NcjqdWrNmTbt3MQYEBGjbtm2aMWMGn8Hjw25893938e5/+DtLgEWxs2N1+qXTbY6JmRUjS8DN5UPemQ4AwLcozgCAn+vucga2GJsCrAEyrhtyXnfqFtstamhokMPhcH1t74KXP7BYLLJarbpuuS4FSpZAiyyBFhlOQ031TR0+nyUj0JpPv6w0ewrow/bt29dqYe
b7Ll++7PpsMm+qr69XYWGhV2M6nU5FR0dTmAHgN8KTw6WFUk1JjdsdNNZBVsXMivl2PwAA6BEUZwB0GhcE/UNnlzMY+cxIt3fNfTrXvXRgGIaamppaFGwaGhrU0NCgyW9NlnHdcLVrX1zTV9u/6nB+kRMjZY+zu7ZXjFvhevz9i2dtPV5yYMl3B7NIjmqHzr/b/oVISYr57xgF3xYsBUrbp22X1WrtsDV/gPWdW+90/944DX3++OcsGQHAdDfmp4uHLpozER82p2yOBv7vQNf2jX/vAKCvCU8OV9hdYbry+RVd//q6gsKDFDIq5KbvmDETy/cAAHyR6cWZ9evXq6ioSDU1NRo9erSKi4v105/+tM3xe/bs0aJFi3Ts2DHFxcVp8eLFysrKchuzfft2LV26VCdPntSIESNUWFioadOm3VRcwJdQJIEn9dRyBhaLRUFBQQoKClJwcHCL/f2H9nfbDkkIUe2HtR0WLGJmuseeN3deu/O40QuWF9y2Daehiwcvdhg38p5IV9zExMQuxbyRt5aM8Cdm5UUz4nJxAZ4UFN65fw/i5sap39B+ru1t922TYRiuOyY7evzwXx52u7vy2v9e01elHRfoo2dEq3/8d38/Nk/Y3Kn5StKju9yXxbxWeU1f/U/HMTv7PYF5ODcGep4lwKLQhFCzp4FehHNUAOg6U//TKCsrU05OjtavX69x48Zp06ZNuvfee/XZZ58pPj6+xfgvv/xS9913nzIzM/WHP/xBBw4c0IIFCzR48GA98MADkqTy8nLNnDlTv/vd7zRt2jT98Y9/1IwZM7R//36lpKR0Ky56B3+6MGcWs062/Ol77CvMWM7ArIKFWXG98T0243eH31egd+vs3ZMR4yPc8mLzeXan45xyvzMwZFSIat/vuEAflR7lFveee+7pdMzQaveLjCH/GaLa3R3H5C5GAEB3cW4MAGiPqcWZVatWad68eZo/f74kqbi4WO+//742bNigZ599tsX4jRs3Kj4+XsXFxZKkhIQEHTlyRCtXrnQVZ4qLi3XPPfcoLy9PkpSXl6c9e/aouLhYb775Zrfiouv86QTEjIKFv70jhQ+MNEfzcgaNnzdqUuMk/dn6Z1lHWT16N4dZa1ybGdeTS0b4U37qzXG7GhPwJH8qlHsrJm8g8jx/OzcGAAB9B+cx/s204kxDQ4MqKir05JNPuvVPmDBBBw8ebPU55eXlmjBhglvfxIkTtWXLFjU2Nspqtaq8vFy5ubktxjQXdLoTV5IcDoccDodr++uvv5Yk1dXVqbGxsf0X2wmNjY26evWqamtrZbVab/p4Zku5/PJNH+Nwba3XY3YnbtD1Kz0St7YLcc2I2dvj+tNr7Wrca58ubXe/PcBQcrJTfzn6X/rmWNsXqLr6WtuKa7NL8RlN+qbquK5fvqCg0Aj1+48fyBIQqGutLPNvRtyeitksUFJgiKTr0jfH2h7XG36e/Ol3p6fi+tNr7WrcjmIGOQ1dvepUUGOAmpw9l5968/e4J/KTzS5F339Y5z96TU2XL7j6AwcMUtTdGbLZUzySFz0d16zXatY5qhnn4735d6ercclPvSNmb4lr1mvt6BzVU3H96efJF69TkJ98I6a/xeW1ejZmb4rbnr5yffzSpUuS5LaMc5sMk1RVVRmSjAMHDrj1FxYWGnfccUerzxk5cqRRWFjo1nfgwAFDklFdXW0YhmFYrVajpKTEbUxJSYlhs9m6HdcwDKOgoMCQRKPRaDQajUaj0Wg0Go1Go9FoNBqN1mY7ffp0hzUS0z/d0mJxr9IbhtGir6PxN/Z35phdjZuXl6dFixa5tp1Op+rq6hQZGdnu8zqrvr5et956q06fPq2wsLCbPh4A9BTyEwBfRX4C4KvITwB8FfkJgK/qK/nJMAxdunRJcXFxHY41rTgTFRWlwMBAnT171q3/3Llzio6ObvU5MTExrY4PCgpSZGRku2Oaj9mduJJkt9tlt9vd+gYOHNj2C+ymsLCwXv3DB6
DvIj8B8FXkJwC+ivwEwFeRnwD4qr6Qn8LDwzs1LsDD82iTzWZTUlKSdu/e7da/e/dujR07ttXnpKamthi/a9cuJScnu9aha2tM8zG7ExcAAAAAAAAAAKCnmLqs2aJFizRnzhwlJycrNTVVmzdvVmVlpbKysiR9u5RYVVWVXn/9dUlSVlaWXnrpJS1atEiZmZkqLy/Xli1b9Oabb7qOmZ2drZ/97Gd67rnnNGXKFL399tv64IMPtH///k7HBQAAAAAAAAAA8BRTizMzZ85UbW2tVqxYoZqaGo0ZM0Y7d+7U0KFDJUk1NTWqrKx0jR8+fLh27typ3NxcrVu3TnFxcVqzZo0eeOAB15ixY8eqtLRUS5Ys0dKlSzVixAiVlZUpJSWl03HNYLfbVVBQ0GLpNAAwG/kJgK8iPwHwVeQnAL6K/ATAV/ljfrIYhmGYPQkAAAAAAAAAAAB/YdpnzgAAAAAAAAAAAPgjijMAAAAAAAAAAABeRHEGAAAAAAAAAADAiyjOAAAAAAAAAAAAeBHFGR8wbNgwWSwWt/bkk0+6jamsrNT999+vkJAQRUVF6bHHHlNDQ4NJMwbgbxwOhxITE2WxWPT3v//dbR/5CYAZJk+erPj4ePXr10+xsbGaM2eOqqur3caQnwB426lTpzRv3jwNHz5c/fv314gRI1RQUNAi95CfAJihsLBQY8eOVXBwsAYOHNjqGPITALOsX79ew4cPV79+/ZSUlKR9+/aZPSWPCzJ7AvjWihUrlJmZ6doODQ11PW5qatLPf/5zDR48WPv371dtba3mzp0rwzC0du1aM6YLwM8sXrxYcXFx+sc//uHWT34CYJa0tDQ99dRTio2NVVVVlX7zm99o+vTpOnjwoCTyEwBzHD9+XE6nU5s2bdLtt9+uf/7zn8rMzNSVK1e0cuVKSeQnAOZpaGjQgw8+qNTUVG3ZsqXFfvITALOUlZUpJydH69ev17hx47Rp0ybde++9+uyzzxQfH2/29DzGYhiGYfYk/N2wYcOUk5OjnJycVve/9957mjRpkk6fPq24uDhJUmlpqTIyMnTu3DmFhYV5cbYA/M17772nRYsWafv27Ro9erSOHj2qxMRE1z7yEwBf8M4772jq1KlyOByyWq3kJwA+o6ioSBs2bNAXX3whifMnAOb7/e9/r5ycHF28eNGtn/wEwCwpKSm66667tGHDBldfQkKCpk6dqmeffdbEmXkWy5r5iOeee06RkZFKTExUYWGh2y2j5eXlGjNmjOsPoyRNnDhRDodDFRUVZkwXgJ/46quvlJmZqTfeeEPBwcEt9pOfAPiCuro6lZSUaOzYsbJarZLITwB8x9dff61Bgwa5tslPAHwV+QmAGRoaGlRRUaEJEya49U+YMMG1MkJfRXHGB2RnZ6u0tFQfffSRFi5cqOLiYi1YsMC1/+zZs4qOjnZ7TkREhGw2m86ePevt6QLwE4ZhKCMjQ1lZWUpOTm51DPkJgJl++9vfKiQkRJGRkaqsrNTbb7/t2kd+AuALTp48qbVr1yorK8vVR34C4KvITwDMcP78eTU1NbXIP9HR0X0+91Cc8ZBly5bJYrG0244cOSJJys3N1fjx4/XDH/5Q8+fP18aNG7VlyxbV1ta6jmexWFrEMAyj1X4AaE9n89PatWtVX1+vvLy8do9HfgLQU7py/iRJTzzxhI4ePapdu3YpMDBQDz30kL6/Yi/5CUBP6Wp+kqTq6mqlp6frwQcf1Pz58932kZ8A9JTu5Kf2kJ8AmOXGPOMPuSfI7An0VQsXLtQvf/nLdscMGzas1f4f//jHkqQTJ04oMjJSMTExOnz4sNuYCxcuqLGxsUVFEQA60tn89PTTT+vQoUOy2+1u+5KTkzV79mxt3bqV/ASgR3X1/CkqKkpRUVG64447lJCQoFtvvVWHDh1Samoq+QlAj+pqfqqurlZaWppSU1O1efNmt3HkJwA96WauP92I/ATADFFRUQoMDGxxl8y5c+f6fO6hOOMhzR
cLuuPo0aOSpNjYWElSamqqCgsLVVNT4+rbtWuX7Ha7kpKSembCAPxGZ/PTmjVr9PTTT7u2q6urNXHiRJWVlSklJUUS+QlAz7qZ86fmO2YcDock8hOAntWV/FRVVaW0tDQlJSXptddeU0CA+4IV5CcAPelmzp9uRH4CYAabzaakpCTt3r1b06ZNc/Xv3r1bU6ZMMXFmnmcxvr/2A7yuvLxchw4dUlpamsLDw/XJJ58oNzdXycnJrnXTm5qalJiYqOjoaBUVFamurk4ZGRmaOnWq1q5da/IrAOAvTp06peHDh+vo0aNKTEyURH4CYI6PP/5YH3/8sX7yk58oIiJCX3zxhfLz81VTU6Njx47JbreTnwCYorq6WuPHj1d8fLxef/11BQYGuvbFxMRI4vwJgHkqKytVV1end955R0VFRdq3b58k6fbbb1doaCj5CYBpysrKNGfOHG3cuNF15/HLL7+sY8eOaejQoWZPz2O4c8ZkdrtdZWVlWr58uRwOh4YOHarMzEwtXrzYNSYwMFDvvvuuFixYoHHjxql///6aNWuWVq5caeLMAYD8BMAc/fv3144dO1RQUKArV64oNjZW6enpKi0tdS3FSH4CYIZdu3bpxIkTOnHihIYMGeK2r/l9keQnAGbJz8/X1q1bXds/+tGPJEkfffSR7r77bvITANPMnDlTtbW1WrFihWpqajRmzBjt3LmzTxdmJO6cAQAAAAAAAAAA8KqAjocAAAAAAAAAAACgp1CcAQAAAAAAAAAA8CKKMwAAAAAAAAAAAF5EcQYAAAAAAAAAAMCLKM4AAAAAAAAAAAB4EcUZAAAAAAAAAAAAL6I4AwAAAAAAAAAA4EUUZwAAAAAAAAAAALyI4gwAAAAAdGDZsmVKTEw0exoAAAAA+giLYRiG2ZMAAAAAALNYLJZ298+dO1cvvfSSHA6HIiMjvTQrAAAAAH0ZxRkAAAAAfu3s2bOux2VlZcrPz9fnn3/u6uvfv7/Cw8PNmBoAAACAPoplzQAAAAD4tZiYGFcLDw+XxWJp0XfjsmYZGRmaOnWqnnnmGUVHR2vgwIFavny5rl+/rieeeEKDBg3SkCFD9Oqrr7rFqqqq0syZMxUREaHIyEhNmTJFp06d8u4LBgAAAGA6ijMAAAAA0A0ffvihqqurtXfvXq1atUrLli3TpEmTFBERocOHDysrK0tZWVk6ffq0JOnq1atKS0tTaGio9u7dq/379ys0NFTp6elqaGgw+dUAAAAA8CaKMwAAAADQDYMGDdKaNWs0atQoPfLIIxo1apSuXr2qp556SiNHjlReXp5sNpsOHDggSSotLVVAQIBeeeUV3XnnnUpISNBrr72myspK/fWvfzX3xQAAAADwqiCzJwAAAAAAvdHo0aMVEPDd+92io6M1ZswY13ZgYKAiIyN17tw5SVJFRYVOnDihAQMGuB3nm2++0cmTJ70zaQAAAAA+geIMAAAAAHSD1Wp127ZYLK32OZ1OSZLT6VRSUpJKSkpaHGvw4MGemygAAAAAn0NxBgAAAAC84K677lJZWZluueUWhYWFmT0dAAAAACbiM2cAAAAAwAtmz56tqKgoTZkyRfv27dOXX36pPXv2KDs7W2fOnDF7egAAAAC8iOIMAAAAAHhBcHCw9u7dq/j4eP3iF79QQkKCHnnkEV27do07aQAAAAA/YzEMwzB7EgAAAAAAAAAAAP6CO2cAAAAAAAAAAAC8iOIMAAAAAAAAAACAF1GcAQAAAAAAAAAA8CKKMwAAAAAAAAAAAF5EcQYAAAAAAAAAAMCLKM4AAAAAAAAAAAB4EcUZAAAAAAAAAAAAL6I4AwAAAAAAAAAA4EUUZwAAAAAAAAAAALyI4gwAAAAAAAAAAIAXUZwBAAAAAAAAAADwov8Dajk8oR9uXncAAAAASUVORK5CYII=", - "text/plain": [ - "
" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], + "outputs": [], "source": [ "#| eval: false\n", "df= feature_importances['Past variable importance over time']\n", @@ -2230,101 +1485,7 @@ "cell_type": "code", "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
\n", - "\n", - "\n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - "
trendy_[lag12]monthobserved_targetCorrelation with Mean Attention
trend1.000.69-0.770.440.40
y_[lag12]0.691.00-0.790.410.22
month-0.77-0.791.00-0.72-0.48
observed_target0.440.41-0.721.000.74
Correlation with Mean Attention0.400.22-0.480.741.00
\n", - "
" - ], - "text/plain": [ - " trend y_[lag12] month observed_target \\\n", - "trend 1.00 0.69 -0.77 0.44 \n", - "y_[lag12] 0.69 1.00 -0.79 0.41 \n", - "month -0.77 -0.79 1.00 -0.72 \n", - "observed_target 0.44 0.41 -0.72 1.00 \n", - "Correlation with Mean Attention 0.40 0.22 -0.48 0.74 \n", - "\n", - " Correlation with Mean Attention \n", - "trend 0.40 \n", - "y_[lag12] 0.22 \n", - "month -0.48 \n", - "observed_target 0.74 \n", - "Correlation with Mean Attention 1.00 " - ] - }, - "execution_count": null, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "#| eval: false\n", "nf.models[0].feature_importance_correlations()" diff --git a/neuralforecast/auto.py b/neuralforecast/auto.py index f0d763220..b3c85892a 100644 --- a/neuralforecast/auto.py +++ b/neuralforecast/auto.py @@ -1,5 +1,3 @@ -"""NeuralForecast contains user-friendly implementations of neural forecasting models that allow for easy transition of computing capabilities (GPU/CPU), computation parallelization, and hyperparameter tuning.""" - # AUTOGENERATED! DO NOT EDIT! File to edit: ../nbs/models.ipynb. # %% auto 0 diff --git a/neuralforecast/common/_base_auto.py b/neuralforecast/common/_base_auto.py index 19af4c5c1..a44f86267 100644 --- a/neuralforecast/common/_base_auto.py +++ b/neuralforecast/common/_base_auto.py @@ -1,5 +1,3 @@ -"""Machine Learning forecasting methods are defined by many hyperparameters that control their behavior, with effects ranging from their speed and memory requirements to their predictive performance. For a long time, manual hyperparameter tuning prevailed. This approach is time-consuming, **automated hyperparameter optimization** methods have been introduced, proving more efficient than manual tuning, grid search, and random search.

The `BaseAuto` class offers shared API connections to hyperparameter optimization algorithms like [Optuna](https://docs.ray.io/en/latest/tune/examples/bayesopt_example.html), [HyperOpt](https://docs.ray.io/en/latest/tune/examples/hyperopt_example.html), [Dragonfly](https://docs.ray.io/en/latest/tune/examples/dragonfly_example.html) among others through `ray`, which gives you access to grid search, bayesian optimization and other state-of-the-art tools like hyperband.

Comprehending the impacts of hyperparameters is still a precious skill, as it can help guide the design of informed hyperparameter spaces that are faster to explore automatically.""" - # AUTOGENERATED! DO NOT EDIT! File to edit: ../../nbs/common.base_auto.ipynb. # %% auto 0 diff --git a/neuralforecast/common/_base_multivariate.py b/neuralforecast/common/_base_multivariate.py index 42c32a761..e068ade73 100644 --- a/neuralforecast/common/_base_multivariate.py +++ b/neuralforecast/common/_base_multivariate.py @@ -1,5 +1,3 @@ -"""The `BaseWindows` class contains standard methods shared across window-based multivariate neural networks; in contrast to recurrent neural networks these models commit to a fixed sequence length input.""" - # AUTOGENERATED! DO NOT EDIT! File to edit: ../../nbs/common.base_multivariate.ipynb. # %% auto 0 diff --git a/neuralforecast/common/_base_windows.py b/neuralforecast/common/_base_windows.py index 9310d5439..1325d61a3 100644 --- a/neuralforecast/common/_base_windows.py +++ b/neuralforecast/common/_base_windows.py @@ -1,5 +1,3 @@ -"""The `BaseWindows` class contains standard methods shared across window-based neural networks; in contrast to recurrent neural networks these models commit to a fixed sequence length input. The class is represented by `MLP`, and other more sophisticated architectures like `NBEATS`, and `NHITS`.""" - # AUTOGENERATED! DO NOT EDIT! File to edit: ../../nbs/common.base_windows.ipynb. # %% auto 0 diff --git a/neuralforecast/common/_scalers.py b/neuralforecast/common/_scalers.py index 182b580e8..c45b58d62 100644 --- a/neuralforecast/common/_scalers.py +++ b/neuralforecast/common/_scalers.py @@ -1,5 +1,3 @@ -"""Temporal normalization has proven to be essential in neural forecasting tasks, as it enables network's non-linearities to express themselves. 
Forecasting scaling methods take particular interest in the temporal dimension where most of the variance dwells, contrary to other deep learning techniques like `BatchNorm` that normalizes across batch and temporal dimensions, and `LayerNorm` that normalizes across the feature dimension. Currently we support the following techniques: `std`, `median`, `norm`, `norm1`, `invariant`, `revin`.""" - # AUTOGENERATED! DO NOT EDIT! File to edit: ../../nbs/common.scalers.ipynb. # %% auto 0 diff --git a/neuralforecast/core.py b/neuralforecast/core.py index 2df06d85a..85214f57a 100644 --- a/neuralforecast/core.py +++ b/neuralforecast/core.py @@ -1,5 +1,3 @@ -"""NeuralForecast contains two main components, PyTorch implementations deep learning predictive models, as well as parallelization and distributed computation utilities. The first component comprises low-level PyTorch model estimator classes like `models.NBEATS` and `models.RNN`. The second component is a high-level `core.NeuralForecast` wrapper class that operates with sets of time series data stored in pandas DataFrames.""" - # AUTOGENERATED! DO NOT EDIT! File to edit: ../nbs/core.ipynb. # %% auto 0 diff --git a/neuralforecast/losses/numpy.py b/neuralforecast/losses/numpy.py index 630e094f4..fec5cec7e 100644 --- a/neuralforecast/losses/numpy.py +++ b/neuralforecast/losses/numpy.py @@ -1,5 +1,3 @@ -"""NeuralForecast contains a collection NumPy loss functions aimed to be used during the models' evaluation.""" - # AUTOGENERATED! DO NOT EDIT! File to edit: ../../nbs/losses.numpy.ipynb. # %% auto 0 diff --git a/neuralforecast/losses/pytorch.py b/neuralforecast/losses/pytorch.py index 0f972cc3a..a713b5b31 100644 --- a/neuralforecast/losses/pytorch.py +++ b/neuralforecast/losses/pytorch.py @@ -1,5 +1,3 @@ -"""NeuralForecast contains a collection PyTorch Loss classes aimed to be used during the models' optimization.""" - # AUTOGENERATED! DO NOT EDIT! File to edit: ../../nbs/losses.pytorch.ipynb. 
# %% auto 0 diff --git a/neuralforecast/models/mlp.py b/neuralforecast/models/mlp.py index 24f9facd4..9b20aec30 100644 --- a/neuralforecast/models/mlp.py +++ b/neuralforecast/models/mlp.py @@ -1,5 +1,3 @@ -"""One of the simplest neural architectures are Multi Layer Perceptrons (`MLP`) composed of stacked Fully Connected Neural Networks trained with backpropagation. Each node in the architecture is capable of modeling non-linear relationships granted by their activation functions. Novel activations like Rectified Linear Units (`ReLU`) have greatly improved the ability to fit deeper networks overcoming gradient vanishing problems that were associated with `Sigmoid` and `TanH` activations. For the forecasting task the last layer is changed to follow a auto-regression problem.

**References**
-[Rosenblatt, F. (1958). "The perceptron: A probabilistic model for information storage and organization in the brain."](https://psycnet.apa.org/record/1959-09865-001)
-[Fukushima, K. (1975). "Cognitron: A self-organizing multilayered neural network."](https://pascal-francis.inist.fr/vibad/index.php?action=getRecordDetail&idt=PASCAL7750396723)
-[Vinod Nair, Geoffrey E. Hinton (2010). "Rectified Linear Units Improve Restricted Boltzmann Machines"](https://www.cs.toronto.edu/~fritz/absps/reluICML.pdf)
""" - # AUTOGENERATED! DO NOT EDIT! File to edit: ../../nbs/models.mlp.ipynb. # %% auto 0 diff --git a/neuralforecast/models/mlpmultivariate.py b/neuralforecast/models/mlpmultivariate.py index f11efaf7b..406c2b884 100644 --- a/neuralforecast/models/mlpmultivariate.py +++ b/neuralforecast/models/mlpmultivariate.py @@ -1,5 +1,3 @@ -"""One of the simplest neural architectures are Multi Layer Perceptrons (`MLP`) composed of stacked Fully Connected Neural Networks trained with backpropagation. Each node in the architecture is capable of modeling non-linear relationships granted by their activation functions. Novel activations like Rectified Linear Units (`ReLU`) have greatly improved the ability to fit deeper networks overcoming gradient vanishing problems that were associated with `Sigmoid` and `TanH` activations. For the forecasting task the last layer is changed to follow a auto-regression problem. This version is multivariate, indicating that it will predict all time series of the forecasting problem jointly.

**References**
-[Rosenblatt, F. (1958). "The perceptron: A probabilistic model for information storage and organization in the brain."](https://psycnet.apa.org/record/1959-09865-001)
-[Fukushima, K. (1975). "Cognitron: A self-organizing multilayered neural network."](https://pascal-francis.inist.fr/vibad/index.php?action=getRecordDetail&idt=PASCAL7750396723)
-[Vinod Nair, Geoffrey E. Hinton (2010). "Rectified Linear Units Improve Restricted Boltzmann Machines"](https://www.cs.toronto.edu/~fritz/absps/reluICML.pdf)
""" - # AUTOGENERATED! DO NOT EDIT! File to edit: ../../nbs/models.mlpmultivariate.ipynb. # %% auto 0 diff --git a/neuralforecast/models/tide.py b/neuralforecast/models/tide.py index c01b73785..2d20e6fd5 100644 --- a/neuralforecast/models/tide.py +++ b/neuralforecast/models/tide.py @@ -1,5 +1,3 @@ -"""Time-series Dense Encoder (`TiDE`) is a MLP-based univariate time-series forecasting model. `TiDE` uses Multi-layer Perceptrons (MLPs) in an encoder-decoder model for long-term time-series forecasting. In addition, this model can handle exogenous inputs.""" - # AUTOGENERATED! DO NOT EDIT! File to edit: ../../nbs/models.tide.ipynb. # %% auto 0 diff --git a/neuralforecast/models/tsmixer.py b/neuralforecast/models/tsmixer.py index e8877d13e..605e2d0ff 100644 --- a/neuralforecast/models/tsmixer.py +++ b/neuralforecast/models/tsmixer.py @@ -1,5 +1,3 @@ -"""Time-Series Mixer (`TSMixer`) is a MLP-based multivariate time-series forecasting model. `TSMixer` jointly learns temporal and cross-sectional representations of the time-series by repeatedly combining time- and feature information using stacked mixing layers. A mixing layer consists of a sequential time- and feature Multi Layer Perceptron (`MLP`). Note: this model cannot handle exogenous inputs. If you want to use additional exogenous inputs, use `TSMixerx`.""" - # AUTOGENERATED! DO NOT EDIT! File to edit: ../../nbs/models.tsmixer.ipynb. # %% auto 0 diff --git a/neuralforecast/models/tsmixerx.py b/neuralforecast/models/tsmixerx.py index 50ba93e6f..7997ed281 100644 --- a/neuralforecast/models/tsmixerx.py +++ b/neuralforecast/models/tsmixerx.py @@ -1,5 +1,3 @@ -"""Time-Series Mixer exogenous (`TSMixerx`) is a MLP-based multivariate time-series forecasting model, with capability for additional exogenous inputs. `TSMixerx` jointly learns temporal and cross-sectional representations of the time-series by repeatedly combining time- and feature information using stacked mixing layers. 
A mixing layer consists of a sequential time- and feature Multi Layer Perceptron (`MLP`).""" - # AUTOGENERATED! DO NOT EDIT! File to edit: ../../nbs/models.tsmixerx.ipynb. # %% auto 0 diff --git a/neuralforecast/utils.py b/neuralforecast/utils.py index 2e076b158..4a272dfcb 100644 --- a/neuralforecast/utils.py +++ b/neuralforecast/utils.py @@ -1,5 +1,3 @@ -"""The `core.NeuralForecast` class allows you to efficiently fit multiple `NeuralForecast` models for large sets of time series. It operates with pandas DataFrame `df` that identifies individual series and datestamps with the `unique_id` and `ds` columns, and the `y` column denotes the target time series variable. To assist development, we declare useful datasets that we use throughout all `NeuralForecast`'s unit tests.

""" - # AUTOGENERATED! DO NOT EDIT! File to edit: ../nbs/utils.ipynb. # %% auto 0 From 39a7a717e5ae1b1088630dbcfe07819414f92d50 Mon Sep 17 00:00:00 2001 From: jasminerienecker Date: Tue, 22 Oct 2024 10:51:48 +1100 Subject: [PATCH 03/11] adding dataloader_kwargs argument instead --- nbs/common.base_model.ipynb | 4 +- nbs/common.base_multivariate.ipynb | 8 +-- nbs/common.base_recurrent.ipynb | 13 ++--- nbs/common.base_windows.ipynb | 8 +-- nbs/docs/tutorials/18_adding_models.ipynb | 9 +++- nbs/models.autoformer.ipynb | 12 ++--- nbs/models.bitcn.ipynb | 12 ++--- nbs/models.deepar.ipynb | 12 ++--- nbs/models.deepnpts.ipynb | 11 ++--- nbs/models.dilated_rnn.ipynb | 12 ++--- nbs/models.dlinear.ipynb | 12 ++--- nbs/models.fedformer.ipynb | 14 ++---- nbs/models.gru.ipynb | 12 ++--- nbs/models.informer.ipynb | 12 ++--- nbs/models.itransformer.ipynb | 18 ++----- nbs/models.kan.ipynb | 12 ++--- nbs/models.lstm.ipynb | 12 ++--- nbs/models.mlp.ipynb | 12 ++--- nbs/models.mlpmultivariate.ipynb | 15 ++---- nbs/models.nbeats.ipynb | 12 ++--- nbs/models.nbeatsx.ipynb | 12 ++--- nbs/models.nhits.ipynb | 12 ++--- nbs/models.nlinear.ipynb | 12 ++--- nbs/models.patchtst.ipynb | 12 ++--- nbs/models.rmok.ipynb | 18 ++----- nbs/models.rnn.ipynb | 14 ++---- nbs/models.softs.ipynb | 18 ++----- nbs/models.stemgnn.ipynb | 15 ++---- nbs/models.tcn.ipynb | 14 ++---- nbs/models.tft.ipynb | 12 ++--- nbs/models.tide.ipynb | 12 ++--- nbs/models.timellm.ipynb | 12 ++--- nbs/models.timemixer.ipynb | 18 ++----- nbs/models.timesnet.ipynb | 20 +++----- nbs/models.tsmixer.ipynb | 15 ++---- nbs/models.tsmixerx.ipynb | 15 ++---- nbs/models.vanillatransformer.ipynb | 12 ++--- nbs/tsdataset.ipynb | 24 +++------ neuralforecast/_modidx.py | 55 ++++++++++++++++++++- neuralforecast/common/_base_model.py | 4 +- neuralforecast/common/_base_multivariate.py | 8 +-- neuralforecast/common/_base_recurrent.py | 13 ++--- neuralforecast/common/_base_windows.py | 8 +-- neuralforecast/models/autoformer.py | 12 ++--- 
neuralforecast/models/bitcn.py | 12 ++--- neuralforecast/models/deepar.py | 12 ++--- neuralforecast/models/deepnpts.py | 11 ++--- neuralforecast/models/dilated_rnn.py | 12 ++--- neuralforecast/models/dlinear.py | 12 ++--- neuralforecast/models/fedformer.py | 12 ++--- neuralforecast/models/gru.py | 12 ++--- neuralforecast/models/informer.py | 12 ++--- neuralforecast/models/itransformer.py | 15 ++---- neuralforecast/models/kan.py | 12 ++--- neuralforecast/models/lstm.py | 12 ++--- neuralforecast/models/mlp.py | 12 ++--- neuralforecast/models/mlpmultivariate.py | 15 ++---- neuralforecast/models/nbeats.py | 12 ++--- neuralforecast/models/nbeatsx.py | 12 ++--- neuralforecast/models/nhits.py | 12 ++--- neuralforecast/models/nlinear.py | 12 ++--- neuralforecast/models/patchtst.py | 12 ++--- neuralforecast/models/rmok.py | 15 ++---- neuralforecast/models/rnn.py | 12 ++--- neuralforecast/models/softs.py | 15 ++---- neuralforecast/models/stemgnn.py | 15 ++---- neuralforecast/models/tcn.py | 12 ++--- neuralforecast/models/tft.py | 12 ++--- neuralforecast/models/tide.py | 12 ++--- neuralforecast/models/timellm.py | 12 ++--- neuralforecast/models/timemixer.py | 15 ++---- neuralforecast/models/timesnet.py | 16 ++---- neuralforecast/models/tsmixer.py | 15 ++---- neuralforecast/models/tsmixerx.py | 15 ++---- neuralforecast/models/vanillatransformer.py | 12 ++--- neuralforecast/tsdataset.py | 26 +++------- 76 files changed, 294 insertions(+), 730 deletions(-) diff --git a/nbs/common.base_model.ipynb b/nbs/common.base_model.ipynb index df4dba6bb..0f1daeeb5 100644 --- a/nbs/common.base_model.ipynb +++ b/nbs/common.base_model.ipynb @@ -362,11 +362,9 @@ " dataset=dataset, \n", " batch_size=batch_size,\n", " valid_batch_size=valid_batch_size,\n", - " num_workers=self.num_workers_loader,\n", - " prefetch_factor=self.prefetch_factor,\n", " drop_last=self.drop_last_loader,\n", " shuffle_train=shuffle_train,\n", - " pin_memory=self.pin_memory,\n", + " **self.dataloader_kwargs\n", " )\n", 
"\n", " if self.val_check_steps > self.max_steps:\n", diff --git a/nbs/common.base_multivariate.ipynb b/nbs/common.base_multivariate.ipynb index e2dab29e6..647923866 100644 --- a/nbs/common.base_multivariate.ipynb +++ b/nbs/common.base_multivariate.ipynb @@ -101,16 +101,14 @@ " futr_exog_list=None,\n", " hist_exog_list=None,\n", " stat_exog_list=None,\n", - " num_workers_loader=0,\n", - " prefetch_factor=None,\n", " drop_last_loader=False,\n", - " pin_memory=False,\n", " random_seed=1, \n", " alias=None,\n", " optimizer=None,\n", " optimizer_kwargs=None,\n", " lr_scheduler=None,\n", " lr_scheduler_kwargs=None,\n", + " dataloader_kwargs=None,\n", " **trainer_kwargs):\n", " super().__init__(\n", " random_seed=random_seed,\n", @@ -174,10 +172,8 @@ " self.decompose_forecast = False\n", "\n", " # DataModule arguments\n", - " self.num_workers_loader = num_workers_loader\n", - " self.prefetch_factor=prefetch_factor\n", + " self.dataloader_kwargs=dataloader_kwargs\n", " self.drop_last_loader = drop_last_loader\n", - " self.pin_memory = pin_memory\n", " # used by on_validation_epoch_end hook\n", " self.validation_step_outputs = []\n", " self.alias = alias\n", diff --git a/nbs/common.base_recurrent.ipynb b/nbs/common.base_recurrent.ipynb index 0297b1436..edb5c4cdb 100644 --- a/nbs/common.base_recurrent.ipynb +++ b/nbs/common.base_recurrent.ipynb @@ -107,16 +107,14 @@ " futr_exog_list=None,\n", " hist_exog_list=None,\n", " stat_exog_list=None,\n", - " num_workers_loader=0,\n", - " prefetch_factor=None,\n", " drop_last_loader=False,\n", - " pin_memory=False,\n", " random_seed=1, \n", " alias=None,\n", " optimizer=None,\n", " optimizer_kwargs=None,\n", " lr_scheduler=None,\n", " lr_scheduler_kwargs=None,\n", + " dataloader_kwargs=None,\n", " **trainer_kwargs):\n", " super().__init__(\n", " random_seed=random_seed,\n", @@ -173,10 +171,8 @@ " self.test_size = 0\n", "\n", " # DataModule arguments\n", - " self.num_workers_loader = num_workers_loader\n", - " self.prefetch_factor = 
prefetch_factor\n", + " self.dataloader_kwargs=dataloader_kwargs\n", " self.drop_last_loader = drop_last_loader\n", - " self.pin_memory = pin_memory\n", " # used by on_validation_epoch_end hook\n", " self.validation_step_outputs = []\n", " self.alias = alias\n", @@ -555,10 +551,7 @@ " datamodule = TimeSeriesDataModule(\n", " dataset=dataset,\n", " valid_batch_size=self.valid_batch_size,\n", - " num_workers=self.num_workers_loader,\n", - " prefetch_factor = self.prefetch_factor,\n", - " pin_memory=self.pin_memory,\n", - " **data_module_kwargs\n", + " **self.dataloader_kwargs\n", " )\n", " fcsts = trainer.predict(self, datamodule=datamodule)\n", " if self.test_size > 0:\n", diff --git a/nbs/common.base_windows.ipynb b/nbs/common.base_windows.ipynb index 088d071c0..72a9cfeb4 100644 --- a/nbs/common.base_windows.ipynb +++ b/nbs/common.base_windows.ipynb @@ -111,16 +111,14 @@ " hist_exog_list=None,\n", " stat_exog_list=None,\n", " exclude_insample_y=False,\n", - " num_workers_loader=0,\n", - " prefetch_factor=None,\n", " drop_last_loader=False,\n", - " pin_memory=False,\n", " random_seed=1,\n", " alias=None,\n", " optimizer=None,\n", " optimizer_kwargs=None,\n", " lr_scheduler=None,\n", " lr_scheduler_kwargs=None,\n", + " dataloader_kwargs=None,\n", " **trainer_kwargs):\n", " super().__init__(\n", " random_seed=random_seed,\n", @@ -189,10 +187,8 @@ " self.decompose_forecast = False\n", "\n", " # DataModule arguments\n", - " self.num_workers_loader = num_workers_loader\n", - " self.prefetch_factor = prefetch_factor\n", + " self.dataloader_kwargs = dataloader_kwargs\n", " self.drop_last_loader = drop_last_loader\n", - " self.pin_memory = pin_memory\n", " # used by on_validation_epoch_end hook\n", " self.validation_step_outputs = []\n", " self.alias = alias\n", diff --git a/nbs/docs/tutorials/18_adding_models.ipynb b/nbs/docs/tutorials/18_adding_models.ipynb index ad19a02b3..45058fe75 100644 --- a/nbs/docs/tutorials/18_adding_models.ipynb +++ 
b/nbs/docs/tutorials/18_adding_models.ipynb @@ -269,7 +269,6 @@ " step_size: int = 1,\n", " scaler_type: str = 'identity',\n", " random_seed: int = 1,\n", - " num_workers_loader: int = 0,\n", " drop_last_loader: bool = False,\n", " **trainer_kwargs):\n", " # Inherit BaseWindows class\n", @@ -415,7 +414,13 @@ ] } ], - "metadata": {}, + "metadata": { + "kernelspec": { + "display_name": "python3", + "language": "python", + "name": "python3" + } + }, "nbformat": 4, "nbformat_minor": 2 } diff --git a/nbs/models.autoformer.ipynb b/nbs/models.autoformer.ipynb index f28597a5d..0badf8506 100644 --- a/nbs/models.autoformer.ipynb +++ b/nbs/models.autoformer.ipynb @@ -455,15 +455,13 @@ " `start_padding_enabled`: bool=False, if True, the model will pad the time series with zeros at the beginning, by input size.
\n", " `scaler_type`: str='robust', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", " `random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
\n", - " `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", - " 'prefetch_factor': int=None, number of batches to be prefetched by the worker.
\n", " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", - " `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
\n", " `alias`: str, optional, Custom name of the model.
\n", " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n", " `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
\n", " `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
\n", + " `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
\n", " `**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
\n", "\n", "\t*References*
\n", @@ -507,14 +505,12 @@ " step_size: int = 1,\n", " scaler_type: str = 'identity',\n", " random_seed: int = 1,\n", - " num_workers_loader: int = 0,\n", - " prefetch_factor: Optional[int] = None,\n", " drop_last_loader: bool = False,\n", - " pin_memory: bool = False,\n", " optimizer = None,\n", " optimizer_kwargs = None,\n", " lr_scheduler = None,\n", " lr_scheduler_kwargs = None,\n", + " dataloader_kwargs=None,\n", " **trainer_kwargs):\n", " super(Autoformer, self).__init__(h=h,\n", " input_size=input_size,\n", @@ -536,15 +532,13 @@ " start_padding_enabled = start_padding_enabled,\n", " step_size=step_size,\n", " scaler_type=scaler_type,\n", - " num_workers_loader=num_workers_loader,\n", - " prefetch_factor=prefetch_factor,\n", " drop_last_loader=drop_last_loader,\n", - " pin_memory=pin_memory,\n", " random_seed=random_seed,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", " lr_scheduler=lr_scheduler,\n", " lr_scheduler_kwargs=lr_scheduler_kwargs,\n", + " dataloader_kwargs=dataloader_kwargs,\n", " **trainer_kwargs)\n", "\n", " # Architecture\n", diff --git a/nbs/models.bitcn.ipynb b/nbs/models.bitcn.ipynb index 049f90abb..14f8ee602 100644 --- a/nbs/models.bitcn.ipynb +++ b/nbs/models.bitcn.ipynb @@ -175,15 +175,13 @@ " `step_size`: int=1, step size between each window of temporal data.
\n", " `scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", " `random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
\n", - " `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", - " 'prefetch_factor': int=None, number of batches to be prefetched by the worker.
\n", " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", - " `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
\n", " `alias`: str, optional, Custom name of the model.
\n", " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n", " `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
\n", " `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
\n", + " `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
\n", " `**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
\n", "\n", " **References**
\n", @@ -220,14 +218,12 @@ " step_size: int = 1,\n", " scaler_type: str = 'identity',\n", " random_seed: int = 1,\n", - " num_workers_loader: int = 0,\n", - " prefetch_factor: Optional[int] = None,\n", " drop_last_loader: bool = False,\n", - " pin_memory: bool = False,\n", " optimizer = None,\n", " optimizer_kwargs = None,\n", " lr_scheduler = None,\n", " lr_scheduler_kwargs = None,\n", + " dataloader_kwargs=None,\n", " **trainer_kwargs):\n", " super(BiTCN, self).__init__(\n", " h=h,\n", @@ -251,14 +247,12 @@ " step_size=step_size,\n", " scaler_type=scaler_type,\n", " random_seed=random_seed,\n", - " num_workers_loader=num_workers_loader,\n", - " prefetch_factor=prefetch_factor,\n", " drop_last_loader=drop_last_loader,\n", - " pin_memory=pin_memory,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", " lr_scheduler=lr_scheduler,\n", " lr_scheduler_kwargs=lr_scheduler_kwargs,\n", + " dataloader_kwargs=dataloader_kwargs,\n", " **trainer_kwargs\n", " )\n", "\n", diff --git a/nbs/models.deepar.ipynb b/nbs/models.deepar.ipynb index f6faff294..29fcb12bb 100644 --- a/nbs/models.deepar.ipynb +++ b/nbs/models.deepar.ipynb @@ -180,15 +180,13 @@ " `step_size`: int=1, step size between each window of temporal data.
\n", " `scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", " `random_seed`: int, random_seed for pytorch initializer and numpy generators.
\n", - " `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", - " 'prefetch_factor': int=None, number of batches to be prefetched by the worker.
\n", " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", - " `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
\n", " `alias`: str, optional, Custom name of the model.
\n", " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n", " `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
\n", " `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
\n", + " `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
\n", " `**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
\n", "\n", " **References**
\n", @@ -230,14 +228,12 @@ " step_size: int = 1,\n", " scaler_type: str = 'identity',\n", " random_seed: int = 1,\n", - " num_workers_loader = 0,\n", - " prefetch_factor: Optional[int] = None,\n", " drop_last_loader = False,\n", - " pin_memory = False,\n", " optimizer = None,\n", " optimizer_kwargs = None,\n", " lr_scheduler = None,\n", " lr_scheduler_kwargs = None,\n", + " dataloader_kwargs = None,\n", " **trainer_kwargs):\n", "\n", " if exclude_insample_y:\n", @@ -273,15 +269,13 @@ " start_padding_enabled=start_padding_enabled,\n", " step_size=step_size,\n", " scaler_type=scaler_type,\n", - " num_workers_loader=num_workers_loader,\n", - " prefetch_factor=prefetch_factor,\n", " drop_last_loader=drop_last_loader,\n", - " pin_memory=pin_memory,\n", " random_seed=random_seed,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", " lr_scheduler=lr_scheduler,\n", " lr_scheduler_kwargs=lr_scheduler_kwargs,\n", + " dataloader_kwargs=dataloader_kwargs,\n", " **trainer_kwargs)\n", "\n", " self.horizon_backup = self.h # Used because h=0 during training\n", diff --git a/nbs/models.deepnpts.ipynb b/nbs/models.deepnpts.ipynb index be1a50ba9..0b0fe0620 100644 --- a/nbs/models.deepnpts.ipynb +++ b/nbs/models.deepnpts.ipynb @@ -118,8 +118,6 @@ " `step_size`: int=1, step size between each window of temporal data.
\n", " `scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", " `random_seed`: int, random_seed for pytorch initializer and numpy generators.
\n", - " `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", - " 'prefetch_factor': int=None, number of batches to be prefetched by the worker.
\n", " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", " `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
\n", " `alias`: str, optional, Custom name of the model.
\n", @@ -127,6 +125,7 @@ " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n", " `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
\n", " `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
\n", + " `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
\n", " `**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
\n", "\n", " **References**
\n", @@ -165,14 +164,12 @@ " step_size: int = 1,\n", " scaler_type: str = 'standard',\n", " random_seed: int = 1,\n", - " num_workers_loader = 0,\n", - " prefetch_factor: Optional[int] = None,\n", " drop_last_loader = False,\n", - " pin_memory = False,\n", " optimizer = None,\n", " optimizer_kwargs = None,\n", " lr_scheduler = None,\n", " lr_scheduler_kwargs = None,\n", + " dataloader_kwargs = None,\n", " **trainer_kwargs):\n", "\n", " if exclude_insample_y:\n", @@ -205,15 +202,13 @@ " start_padding_enabled=start_padding_enabled,\n", " step_size=step_size,\n", " scaler_type=scaler_type,\n", - " num_workers_loader=num_workers_loader,\n", - " prefetch_factor=prefetch_factor,\n", " drop_last_loader=drop_last_loader,\n", - " pin_memory=pin_memory,\n", " random_seed=random_seed,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", " lr_scheduler=lr_scheduler,\n", " lr_scheduler_kwargs=lr_scheduler_kwargs,\n", + " dataloader_kwargs=dataloader_kwargs,\n", " **trainer_kwargs)\n", "\n", " self.h = h\n", diff --git a/nbs/models.dilated_rnn.ipynb b/nbs/models.dilated_rnn.ipynb index 6510e6287..306292527 100644 --- a/nbs/models.dilated_rnn.ipynb +++ b/nbs/models.dilated_rnn.ipynb @@ -387,15 +387,13 @@ " `step_size`: int=1, step size between each window of temporal data.
\n", " `scaler_type`: str='robust', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", " `random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
\n", - " `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", - " 'prefetch_factor': int=None, number of batches to be prefetched by the worker.
\n", " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", - " `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
\n", " `alias`: str, optional, Custom name of the model.
\n", " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n", " `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
\n", " `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
\n", + " `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
\n", " `**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
\n", " \"\"\"\n", " # Class attributes\n", @@ -429,14 +427,12 @@ " step_size: int = 1,\n", " scaler_type: str = 'robust',\n", " random_seed: int = 1,\n", - " num_workers_loader: int = 0,\n", - " prefetch_factor: Optional[int] = None,\n", " drop_last_loader: bool = False,\n", - " pin_memory: bool = False,\n", " optimizer = None,\n", " optimizer_kwargs = None,\n", " lr_scheduler = None,\n", " lr_scheduler_kwargs = None,\n", + " dataloader_kwargs = None,\n", " **trainer_kwargs):\n", " super(DilatedRNN, self).__init__(\n", " h=h,\n", @@ -455,15 +451,13 @@ " futr_exog_list=futr_exog_list,\n", " hist_exog_list=hist_exog_list,\n", " stat_exog_list=stat_exog_list,\n", - " num_workers_loader=num_workers_loader,\n", - " prefetch_factor=prefetch_factor,\n", " drop_last_loader=drop_last_loader,\n", - " pin_memory=pin_memory,\n", " random_seed=random_seed,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", " lr_scheduler=lr_scheduler,\n", " lr_scheduler_kwargs=lr_scheduler_kwargs,\n", + " dataloader_kwargs=dataloader_kwargs,\n", " **trainer_kwargs\n", " )\n", "\n", diff --git a/nbs/models.dlinear.ipynb b/nbs/models.dlinear.ipynb index 69f4ef666..994ea0e1f 100644 --- a/nbs/models.dlinear.ipynb +++ b/nbs/models.dlinear.ipynb @@ -159,15 +159,13 @@ " `start_padding_enabled`: bool=False, if True, the model will pad the time series with zeros at the beginning, by input size.
\n", " `scaler_type`: str='robust', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", " `random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
\n", - " `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", - " 'prefetch_factor': int=None, number of batches to be prefetched by the worker.
\n", " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", - " `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
\n", " `alias`: str, optional, Custom name of the model.
\n", " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n", " `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
\n", " `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
\n", + " `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
\n", " `**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
\n", "\n", "\t*References*
\n", @@ -202,14 +200,12 @@ " step_size: int = 1,\n", " scaler_type: str = 'identity',\n", " random_seed: int = 1,\n", - " num_workers_loader: int = 0,\n", - " prefetch_factor: Optional[int] = None,\n", " drop_last_loader: bool = False,\n", - " pin_memory: bool = False,\n", " optimizer = None,\n", " optimizer_kwargs = None,\n", " lr_scheduler = None,\n", " lr_scheduler_kwargs = None,\n", + " dataloader_kwargs=None,\n", " **trainer_kwargs):\n", " super(DLinear, self).__init__(h=h,\n", " input_size=input_size,\n", @@ -231,15 +227,13 @@ " start_padding_enabled = start_padding_enabled,\n", " step_size=step_size,\n", " scaler_type=scaler_type,\n", - " num_workers_loader=num_workers_loader,\n", - " prefetch_factor=prefetch_factor,\n", " drop_last_loader=drop_last_loader,\n", - " pin_memory=pin_memory,\n", " random_seed=random_seed,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", " lr_scheduler=lr_scheduler,\n", " lr_scheduler_kwargs=lr_scheduler_kwargs,\n", + " dataloader_kwargs=dataloader_kwargs,\n", " **trainer_kwargs)\n", " \n", " # Architecture\n", diff --git a/nbs/models.fedformer.ipynb b/nbs/models.fedformer.ipynb index 2b27e82de..7dcb16553 100644 --- a/nbs/models.fedformer.ipynb +++ b/nbs/models.fedformer.ipynb @@ -448,15 +448,13 @@ " `start_padding_enabled`: bool=False, if True, the model will pad the time series with zeros at the beginning, by input size.
\n", " `scaler_type`: str='robust', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", " `random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
\n", - " `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", - " 'prefetch_factor': int=None, number of batches to be prefetched by the worker.
\n", " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", - " `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
\n", " `alias`: str, optional, Custom name of the model.
\n", " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n", " `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
\n", " `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
\n", + " `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
\n", " `**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
\n", "\n", " \"\"\"\n", @@ -499,14 +497,12 @@ " step_size: int = 1,\n", " scaler_type: str = 'identity',\n", " random_seed: int = 1,\n", - " num_workers_loader: int = 0,\n", - " prefetch_factor: Optional[int] = None,\n", " drop_last_loader: bool = False,\n", - " pin_memory: bool = False,\n", " optimizer=None,\n", " optimizer_kwargs=None,\n", " lr_scheduler = None,\n", " lr_scheduler_kwargs = None,\n", + " dataloader_kwargs = None,\n", " **trainer_kwargs):\n", " super(FEDformer, self).__init__(h=h,\n", " input_size=input_size,\n", @@ -527,15 +523,13 @@ " start_padding_enabled=start_padding_enabled,\n", " step_size=step_size,\n", " scaler_type=scaler_type,\n", - " num_workers_loader=num_workers_loader,\n", - " prefetch_factor=prefetch_factor,\n", " drop_last_loader=drop_last_loader,\n", - " pin_memory=pin_memory,\n", " random_seed=random_seed,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", " lr_scheduler=lr_scheduler,\n", - " lr_scheduler_kwargs=lr_scheduler_kwargs, \n", + " lr_scheduler_kwargs=lr_scheduler_kwargs,\n", + " dataloader_kwargs=dataloader_kwargs, \n", " **trainer_kwargs)\n", " # Architecture\n", " self.label_len = int(np.ceil(input_size * decoder_input_size_multiplier))\n", diff --git a/nbs/models.gru.ipynb b/nbs/models.gru.ipynb index 155e53f92..93e2032c2 100644 --- a/nbs/models.gru.ipynb +++ b/nbs/models.gru.ipynb @@ -121,15 +121,13 @@ " `valid_batch_size`: int=None, number of different series in each validation and test batch.
\n", " `scaler_type`: str='robust', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", " `random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
\n", - " `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", - " 'prefetch_factor': int=None, number of batches to be prefetched by the worker.
\n", " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", - " `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
\n", " `alias`: str, optional, Custom name of the model.
\n", " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n", " `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
\n", " `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
\n", + " `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
\n", " `**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
\n", " \"\"\"\n", " # Class attributes\n", @@ -164,14 +162,12 @@ " valid_batch_size: Optional[int] = None,\n", " scaler_type: str='robust',\n", " random_seed=1,\n", - " num_workers_loader=0,\n", - " prefetch_factor: Optional[int] = None,\n", " drop_last_loader = False,\n", - " pin_memory = False,\n", " optimizer = None,\n", " optimizer_kwargs = None,\n", " lr_scheduler = None,\n", " lr_scheduler_kwargs = None,\n", + " dataloader_kwargs = None,\n", " **trainer_kwargs):\n", " super(GRU, self).__init__(\n", " h=h,\n", @@ -190,15 +186,13 @@ " futr_exog_list=futr_exog_list,\n", " hist_exog_list=hist_exog_list,\n", " stat_exog_list=stat_exog_list,\n", - " num_workers_loader=num_workers_loader,\n", - " prefetch_factor=prefetch_factor,\n", " drop_last_loader=drop_last_loader,\n", - " pin_memory=pin_memory,\n", " random_seed=random_seed,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", " lr_scheduler=lr_scheduler,\n", " lr_scheduler_kwargs=lr_scheduler_kwargs,\n", + " dataloader_kwargs=dataloader_kwargs,\n", " **trainer_kwargs\n", " )\n", "\n", diff --git a/nbs/models.informer.ipynb b/nbs/models.informer.ipynb index 2da2ba79c..dffc74e8b 100644 --- a/nbs/models.informer.ipynb +++ b/nbs/models.informer.ipynb @@ -303,15 +303,13 @@ " `start_padding_enabled`: bool=False, if True, the model will pad the time series with zeros at the beginning, by input size.
\n", " `scaler_type`: str='robust', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", " `random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
\n", - " `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", - " 'prefetch_factor': int=None, number of batches to be prefetched by the worker.
\n", " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", - " `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
\n", " `alias`: str, optional, Custom name of the model.
\n", " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n", " `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
\n", " `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
\n", + " `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
\n", " `**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
\n", "\n", "\t*References*
\n", @@ -355,14 +353,12 @@ " step_size: int = 1,\n", " scaler_type: str = 'identity',\n", " random_seed: int = 1,\n", - " num_workers_loader: int = 0,\n", - " prefetch_factor: Optional[int] = None,\n", " drop_last_loader: bool = False,\n", - " pin_memory: bool = False,\n", " optimizer = None,\n", " optimizer_kwargs = None,\n", " lr_scheduler = None,\n", " lr_scheduler_kwargs = None,\n", + " dataloader_kwargs = None,\n", " **trainer_kwargs):\n", " super(Informer, self).__init__(h=h,\n", " input_size=input_size,\n", @@ -384,15 +380,13 @@ " start_padding_enabled=start_padding_enabled,\n", " step_size=step_size,\n", " scaler_type=scaler_type,\n", - " num_workers_loader=num_workers_loader,\n", - " prefetch_factor=prefetch_factor,\n", " drop_last_loader=drop_last_loader,\n", - " pin_memory=pin_memory,\n", " random_seed=random_seed,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", " lr_scheduler=lr_scheduler,\n", " lr_scheduler_kwargs=lr_scheduler_kwargs,\n", + " dataloader_kwargs=dataloader_kwargs,\n", " **trainer_kwargs)\n", "\n", " # Architecture\n", diff --git a/nbs/models.itransformer.ipynb b/nbs/models.itransformer.ipynb index 850511825..e3ad45c0a 100644 --- a/nbs/models.itransformer.ipynb +++ b/nbs/models.itransformer.ipynb @@ -194,10 +194,6 @@ "outputs": [], "source": [ "#| export\n", - "\n", - "from typing import Optional\n", - "\n", - "\n", "class iTransformer(BaseMultivariate):\n", "\n", " \"\"\" iTransformer\n", @@ -228,15 +224,13 @@ " `step_size`: int=1, step size between each window of temporal data.
\n", " `scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", " `random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
\n", - " `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", - " 'prefetch_factor': int=None, number of batches to be prefetched by the worker.
\n", " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", - " `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
\n", " `alias`: str, optional, Custom name of the model.
\n", " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n", " `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
\n", " `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
\n", + " `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
\n", " `**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
\n", " \n", " **References**
\n", @@ -275,14 +269,12 @@ " step_size: int = 1,\n", " scaler_type: str = 'identity',\n", " random_seed: int = 1,\n", - " num_workers_loader: int = 0,\n", - " prefetch_factor: Optional[int] = None,\n", " drop_last_loader: bool = False,\n", - " pin_memory: bool = False,\n", " optimizer = None,\n", " optimizer_kwargs = None,\n", " lr_scheduler = None,\n", - " lr_scheduler_kwargs = None, \n", + " lr_scheduler_kwargs = None, \n", + " dataloader_kwargs = None, \n", " **trainer_kwargs):\n", " \n", " super(iTransformer, self).__init__(h=h,\n", @@ -302,14 +294,12 @@ " step_size=step_size,\n", " scaler_type=scaler_type,\n", " random_seed=random_seed,\n", - " num_workers_loader=num_workers_loader,\n", - " prefetch_factor=prefetch_factor,\n", " drop_last_loader=drop_last_loader,\n", - " pin_memory=pin_memory,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", " lr_scheduler=lr_scheduler,\n", " lr_scheduler_kwargs=lr_scheduler_kwargs,\n", + " dataloader_kwargs=dataloader_kwargs,\n", " **trainer_kwargs)\n", " \n", " self.enc_in = n_series\n", diff --git a/nbs/models.kan.ipynb b/nbs/models.kan.ipynb index 3900ada2f..3577b61d5 100644 --- a/nbs/models.kan.ipynb +++ b/nbs/models.kan.ipynb @@ -359,13 +359,11 @@ " `step_size`: int=1, step size between each window of temporal data.
\n", " `scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", " `random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
\n", - " `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", - " 'prefetch_factor': int=None, number of batches to be prefetched by the worker.
\n", " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", - " `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
\n", " `alias`: str, optional, Custom name of the model.
\n", " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n", + " `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
\n", " `**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
\n", "\n", " **References**
\n", @@ -410,12 +408,10 @@ " step_size: int = 1,\n", " scaler_type: str = 'identity',\n", " random_seed: int = 1,\n", - " num_workers_loader: int = 0,\n", - " prefetch_factor: Optional[int] = None,\n", " drop_last_loader: bool = False,\n", - " pin_memory: bool = False,\n", " optimizer = None,\n", " optimizer_kwargs = None,\n", + " dataloader_kwargs = None,\n", " **trainer_kwargs):\n", " \n", " # Inherit BaseWindows class\n", @@ -439,13 +435,11 @@ " start_padding_enabled=start_padding_enabled,\n", " step_size=step_size,\n", " scaler_type=scaler_type,\n", - " num_workers_loader=num_workers_loader,\n", - " prefetch_factor=prefetch_factor,\n", " drop_last_loader=drop_last_loader,\n", - " pin_memory=pin_memory,\n", " random_seed=random_seed,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", + " dataloader_kwargs = dataloader_kwargs,\n", " **trainer_kwargs)\n", " \n", " # Architecture\n", diff --git a/nbs/models.lstm.ipynb b/nbs/models.lstm.ipynb index 922662c8d..389bc6ac5 100644 --- a/nbs/models.lstm.ipynb +++ b/nbs/models.lstm.ipynb @@ -118,15 +118,13 @@ " `valid_batch_size`: int=None, number of different series in each validation and test batch.
\n", " `scaler_type`: str='robust', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", " `random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
\n", - " `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", - " 'prefetch_factor': int=None, number of batches to be prefetched by the worker.
\n", " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", - " `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
\n", " `alias`: str, optional, Custom name of the model.
\n", " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n", " `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
\n", " `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
\n", + " `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
\n", " `**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
\n", " \"\"\"\n", " # Class attributes\n", @@ -160,14 +158,12 @@ " valid_batch_size: Optional[int] = None,\n", " scaler_type: str = 'robust',\n", " random_seed = 1,\n", - " num_workers_loader = 0,\n", - " prefetch_factor: Optional[int] = None,\n", " drop_last_loader = False,\n", - " pin_memory = False,\n", " optimizer = None,\n", " optimizer_kwargs = None,\n", " lr_scheduler = None,\n", " lr_scheduler_kwargs = None,\n", + " dataloader_kwargs = None,\n", " **trainer_kwargs):\n", " super(LSTM, self).__init__(\n", " h=h,\n", @@ -186,15 +182,13 @@ " futr_exog_list=futr_exog_list,\n", " hist_exog_list=hist_exog_list,\n", " stat_exog_list=stat_exog_list,\n", - " num_workers_loader=num_workers_loader,\n", - " prefetch_factor=prefetch_factor,\n", " drop_last_loader=drop_last_loader,\n", - " pin_memory=pin_memory,\n", " random_seed=random_seed,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", " lr_scheduler=lr_scheduler,\n", " lr_scheduler_kwargs=lr_scheduler_kwargs,\n", + " dataloader_kwargs=dataloader_kwargs,\n", " **trainer_kwargs\n", " )\n", "\n", diff --git a/nbs/models.mlp.ipynb b/nbs/models.mlp.ipynb index 948c6a26c..b644cf959 100644 --- a/nbs/models.mlp.ipynb +++ b/nbs/models.mlp.ipynb @@ -111,15 +111,13 @@ " `step_size`: int=1, step size between each window of temporal data.
\n", " `scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", " `random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
\n", - " `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", - " 'prefetch_factor': int=None, number of batches to be prefetched by the worker.
\n", " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", - " `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
\n", " `alias`: str, optional, Custom name of the model.
\n", " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n", " `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
\n", " `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
\n", + " `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
\n", " `**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
\n", " \"\"\"\n", " # Class attributes\n", @@ -152,14 +150,12 @@ " step_size: int = 1,\n", " scaler_type: str = 'identity',\n", " random_seed: int = 1,\n", - " num_workers_loader: int = 0,\n", - " prefetch_factor: Optional[int] = None,\n", " drop_last_loader: bool = False,\n", - " pin_memory: bool = False,\n", " optimizer = None,\n", " optimizer_kwargs = None,\n", " lr_scheduler = None,\n", " lr_scheduler_kwargs = None,\n", + " dataloader_kwargs = None,\n", " **trainer_kwargs):\n", "\n", " # Inherit BaseWindows class\n", @@ -183,15 +179,13 @@ " start_padding_enabled=start_padding_enabled,\n", " step_size=step_size,\n", " scaler_type=scaler_type,\n", - " num_workers_loader=num_workers_loader,\n", - " prefetch_factor=prefetch_factor,\n", " drop_last_loader=drop_last_loader,\n", - " pin_memory=pin_memory,\n", " random_seed=random_seed,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", " lr_scheduler=lr_scheduler,\n", " lr_scheduler_kwargs=lr_scheduler_kwargs,\n", + " dataloader_kwargs=dataloader_kwargs,\n", " **trainer_kwargs)\n", "\n", " # Architecture\n", diff --git a/nbs/models.mlpmultivariate.ipynb b/nbs/models.mlpmultivariate.ipynb index d67875651..dfcab59d8 100644 --- a/nbs/models.mlpmultivariate.ipynb +++ b/nbs/models.mlpmultivariate.ipynb @@ -76,9 +76,6 @@ "outputs": [], "source": [ "#| export\n", - "from typing import Optional\n", - "\n", - "\n", "class MLPMultivariate(BaseMultivariate):\n", " \"\"\" MLPMultivariate\n", "\n", @@ -108,15 +105,13 @@ " `step_size`: int=1, step size between each window of temporal data.
\n", " `scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", " `random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
\n", - " `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", - " 'prefetch_factor': int=None, number of batches to be prefetched by the worker.
\n", " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", - " `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
\n", " `alias`: str, optional, Custom name of the model.
\n", " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n", " `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
\n", " `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
\n", + " `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
\n", " `**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
\n", " \"\"\"\n", " # Class attributes\n", @@ -145,14 +140,12 @@ " step_size: int = 1,\n", " scaler_type: str = 'identity',\n", " random_seed: int = 1,\n", - " num_workers_loader: int = 0,\n", - " prefetch_factor: Optional[int] = None,\n", " drop_last_loader: bool = False,\n", - " pin_memory: bool = False,\n", " optimizer = None,\n", " optimizer_kwargs = None,\n", " lr_scheduler = None,\n", " lr_scheduler_kwargs = None,\n", + " dataloader_kwargs = None,\n", " **trainer_kwargs):\n", "\n", " # Inherit BaseMultivariate class\n", @@ -172,15 +165,13 @@ " batch_size=batch_size,\n", " step_size=step_size,\n", " scaler_type=scaler_type,\n", - " num_workers_loader=num_workers_loader,\n", - " prefetch_factor=prefetch_factor,\n", " drop_last_loader=drop_last_loader,\n", - " pin_memory=pin_memory,\n", " random_seed=random_seed,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", " lr_scheduler=lr_scheduler,\n", " lr_scheduler_kwargs=lr_scheduler_kwargs,\n", + " dataloader_kwargs=dataloader_kwargs,\n", " **trainer_kwargs)\n", "\n", " # Architecture\n", diff --git a/nbs/models.nbeats.ipynb b/nbs/models.nbeats.ipynb index aed5c93cc..a855c4f49 100644 --- a/nbs/models.nbeats.ipynb +++ b/nbs/models.nbeats.ipynb @@ -267,15 +267,13 @@ " `step_size`: int=1, step size between each window of temporal data.
\n", " `scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", " `random_seed`: int, random_seed for pytorch initializer and numpy generators.
\n", - " `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", - " 'prefetch_factor': int=None, number of batches to be prefetched by the worker.
\n", " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", - " `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
\n", " `alias`: str, optional, Custom name of the model.
\n", " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n", " `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
\n", " `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
\n", + " `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
\n", " `**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
\n", "\n", " **References:**
\n", @@ -314,14 +312,12 @@ " step_size: int = 1,\n", " scaler_type: str ='identity',\n", " random_seed: int = 1,\n", - " num_workers_loader: int = 0,\n", - " prefetch_factor: Optional[int] = None,\n", " drop_last_loader: bool = False,\n", - " pin_memory: bool = False,\n", " optimizer = None,\n", " optimizer_kwargs = None,\n", " lr_scheduler = None,\n", " lr_scheduler_kwargs = None,\n", + " dataloader_kwargs = None,\n", " **trainer_kwargs):\n", " \n", " # Protect horizon collapsed seasonality and trend NBEATSx-i basis\n", @@ -347,15 +343,13 @@ " start_padding_enabled=start_padding_enabled,\n", " step_size=step_size,\n", " scaler_type=scaler_type,\n", - " num_workers_loader=num_workers_loader,\n", - " prefetch_factor=prefetch_factor,\n", " drop_last_loader=drop_last_loader,\n", - " pin_memory=pin_memory,\n", " random_seed=random_seed,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", " lr_scheduler=lr_scheduler,\n", " lr_scheduler_kwargs=lr_scheduler_kwargs,\n", + " dataloader_kwargs=dataloader_kwargs,\n", " **trainer_kwargs)\n", "\n", " # Architecture\n", diff --git a/nbs/models.nbeatsx.ipynb b/nbs/models.nbeatsx.ipynb index 0045ccfd9..04f56ea5b 100644 --- a/nbs/models.nbeatsx.ipynb +++ b/nbs/models.nbeatsx.ipynb @@ -411,15 +411,13 @@ " `step_size`: int=1, step size between each window of temporal data.
\n", " `scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", " `random_seed`: int, random seed initialization for replicability.
\n", - " `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", - " 'prefetch_factor': int=None, number of batches to be prefetched by the worker.
\n", " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", - " `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
\n", " `alias`: str, optional, Custom name of the model.
\n", " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n", " `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
\n", " `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
\n", + " `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
\n", " `**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
\n", "\n", " **References:**
\n", @@ -464,14 +462,12 @@ " step_size: int = 1,\n", " scaler_type: str = \"identity\",\n", " random_seed: int = 1,\n", - " num_workers_loader: int = 0,\n", - " prefetch_factor: Optional[int] = None,\n", " drop_last_loader: bool = False,\n", - " pin_memory: bool = False,\n", " optimizer = None,\n", " optimizer_kwargs = None,\n", " lr_scheduler = None,\n", " lr_scheduler_kwargs = None,\n", + " dataloader_kwargs = None,\n", " **trainer_kwargs,\n", " ):\n", " # Protect horizon collapsed seasonality and trend NBEATSx-i basis\n", @@ -501,15 +497,13 @@ " start_padding_enabled=start_padding_enabled,\n", " step_size = step_size,\n", " scaler_type=scaler_type,\n", - " num_workers_loader=num_workers_loader,\n", - " prefetch_factor=prefetch_factor,\n", " drop_last_loader=drop_last_loader,\n", - " pin_memory=pin_memory,\n", " random_seed=random_seed,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", " lr_scheduler=lr_scheduler,\n", " lr_scheduler_kwargs=lr_scheduler_kwargs,\n", + " dataloader_kwargs=dataloader_kwargs,\n", " **trainer_kwargs)\n", "\n", " # Architecture\n", diff --git a/nbs/models.nhits.ipynb b/nbs/models.nhits.ipynb index 843cbb5c9..43351c6a4 100644 --- a/nbs/models.nhits.ipynb +++ b/nbs/models.nhits.ipynb @@ -300,15 +300,13 @@ " `step_size`: int=1, step size between each window of temporal data.
\n", " `scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", " `random_seed`: int, random_seed for pytorch initializer and numpy generators.
\n", - " `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", - " 'prefetch_factor': int=None, number of batches to be prefetched by the worker.
\n", " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", - " `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
\n", " `alias`: str, optional, Custom name of the model.
\n", " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n", " `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
\n", " `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
\n", + " `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
\n", " `**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
\n", "\n", " **References:**
\n", @@ -353,14 +351,12 @@ " step_size: int = 1,\n", " scaler_type: str = 'identity',\n", " random_seed: int = 1,\n", - " num_workers_loader = 0,\n", - " prefetch_factor: Optional[int] = None,\n", " drop_last_loader = False,\n", - " pin_memory = False,\n", " optimizer = None,\n", " optimizer_kwargs = None,\n", " lr_scheduler = None,\n", " lr_scheduler_kwargs = None,\n", + " dataloader_kwargs = None,\n", " **trainer_kwargs):\n", "\n", " # Inherit BaseWindows class\n", @@ -384,15 +380,13 @@ " start_padding_enabled=start_padding_enabled,\n", " step_size=step_size,\n", " scaler_type=scaler_type,\n", - " num_workers_loader=num_workers_loader,\n", - " prefetch_factor=prefetch_factor,\n", " drop_last_loader=drop_last_loader,\n", - " pin_memory=pin_memory,\n", " random_seed=random_seed,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", " lr_scheduler=lr_scheduler,\n", " lr_scheduler_kwargs=lr_scheduler_kwargs,\n", + " dataloader_kwargs=dataloader_kwargs,\n", " **trainer_kwargs)\n", "\n", " # Architecture\n", diff --git a/nbs/models.nlinear.ipynb b/nbs/models.nlinear.ipynb index a053e3cbe..974256002 100644 --- a/nbs/models.nlinear.ipynb +++ b/nbs/models.nlinear.ipynb @@ -99,15 +99,13 @@ " `start_padding_enabled`: bool=False, if True, the model will pad the time series with zeros at the beginning, by input size.
\n", " `scaler_type`: str='robust', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", " `random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
\n", - " `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", - " 'prefetch_factor': int=None, number of batches to be prefetched by the worker.
\n", " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", - " `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
\n", " `alias`: str, optional, Custom name of the model.
\n", " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n", " `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
\n", " `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
\n", + " `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
\n", " `**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
\n", "\n", "\t*References*
\n", @@ -141,14 +139,12 @@ " step_size: int = 1,\n", " scaler_type: str = 'identity',\n", " random_seed: int = 1,\n", - " num_workers_loader: int = 0,\n", - " prefetch_factor: Optional[int] = None,\n", " drop_last_loader: bool = False,\n", - " pin_memory: bool = False,\n", " optimizer = None,\n", " optimizer_kwargs = None,\n", " lr_scheduler = None,\n", " lr_scheduler_kwargs = None,\n", + " dataloader_kwargs = None,\n", " **trainer_kwargs):\n", " super(NLinear, self).__init__(h=h,\n", " input_size=input_size,\n", @@ -170,15 +166,13 @@ " start_padding_enabled = start_padding_enabled,\n", " step_size=step_size,\n", " scaler_type=scaler_type,\n", - " num_workers_loader=num_workers_loader,\n", - " prefetch_factor=prefetch_factor,\n", " drop_last_loader=drop_last_loader,\n", - " pin_memory=pin_memory,\n", " random_seed=random_seed,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", " lr_scheduler=lr_scheduler,\n", " lr_scheduler_kwargs=lr_scheduler_kwargs,\n", + " dataloader_kwargs=dataloader_kwargs,\n", " **trainer_kwargs)\n", "\n", " # Architecture\n", diff --git a/nbs/models.patchtst.ipynb b/nbs/models.patchtst.ipynb index 888b3bbc1..b5ecd50c1 100644 --- a/nbs/models.patchtst.ipynb +++ b/nbs/models.patchtst.ipynb @@ -659,15 +659,13 @@ " `step_size`: int=1, step size between each window of temporal data.
\n", " `scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", " `random_seed`: int, random_seed for pytorch initializer and numpy generators.
\n", - " `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", - " 'prefetch_factor': int=None, number of batches to be prefetched by the worker.
\n", " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", - " `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
\n", " `alias`: str, optional, Custom name of the model.
\n", " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n", " `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
\n", " `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
\n", + " `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
\n", " `**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
\n", "\n", " **References:**
\n", @@ -718,14 +716,12 @@ " step_size: int = 1,\n", " scaler_type: str = 'identity',\n", " random_seed: int = 1,\n", - " num_workers_loader: int = 0,\n", - " prefetch_factor: Optional[int] = None,\n", " drop_last_loader: bool = False,\n", - " pin_memory: bool = False,\n", " optimizer = None,\n", " optimizer_kwargs = None,\n", " lr_scheduler = None,\n", " lr_scheduler_kwargs = None,\n", + " dataloader_kwargs = None,\n", " **trainer_kwargs):\n", " super(PatchTST, self).__init__(h=h,\n", " input_size=input_size,\n", @@ -747,15 +743,13 @@ " start_padding_enabled=start_padding_enabled,\n", " step_size=step_size,\n", " scaler_type=scaler_type,\n", - " num_workers_loader=num_workers_loader,\n", - " prefetch_factor=prefetch_factor,\n", " drop_last_loader=drop_last_loader,\n", - " pin_memory=pin_memory,\n", " random_seed=random_seed,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", " lr_scheduler=lr_scheduler,\n", " lr_scheduler_kwargs=lr_scheduler_kwargs,\n", + " dataloader_kwargs=dataloader_kwargs,\n", " **trainer_kwargs) \n", "\n", " # Enforce correct patch_len, regardless of user input\n", diff --git a/nbs/models.rmok.ipynb b/nbs/models.rmok.ipynb index 74da2ce5d..170e8c730 100644 --- a/nbs/models.rmok.ipynb +++ b/nbs/models.rmok.ipynb @@ -330,10 +330,6 @@ "outputs": [], "source": [ "#| export\n", - "\n", - "from typing import Optional\n", - "\n", - "\n", "class RMoK(BaseMultivariate):\n", " \"\"\" Reversible Mixture of KAN\n", " **Parameters**
\n", @@ -359,15 +355,13 @@ " `step_size`: int=1, step size between each window of temporal data.
\n", " `scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", " `random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
\n", - " `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", - " 'prefetch_factor': int=None, number of batches to be prefetched by the worker.
\n", " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", - " `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
\n", " `alias`: str, optional, Custom name of the model.
\n", " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n", " `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
\n", " `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
\n", + " `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
\n", " `**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
\n", "\n", " Reference
\n", @@ -403,14 +397,12 @@ " step_size: int = 1,\n", " scaler_type: str = 'identity',\n", " random_seed: int = 1,\n", - " num_workers_loader: int = 0,\n", - " prefetch_factor: Optional[int] = None,\n", " drop_last_loader: bool = False,\n", - " pin_memory: bool = False,\n", " optimizer = None,\n", " optimizer_kwargs = None,\n", " lr_scheduler = None,\n", - " lr_scheduler_kwargs = None, \n", + " lr_scheduler_kwargs = None,\n", + " dataloader_kwargs = None, \n", " **trainer_kwargs):\n", " \n", " super(RMoK, self).__init__(h=h,\n", @@ -430,14 +422,12 @@ " step_size=step_size,\n", " scaler_type=scaler_type,\n", " random_seed=random_seed,\n", - " num_workers_loader=num_workers_loader,\n", - " prefetch_factor=prefetch_factor,\n", " drop_last_loader=drop_last_loader,\n", - " pin_memory=pin_memory,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", " lr_scheduler=lr_scheduler,\n", " lr_scheduler_kwargs=lr_scheduler_kwargs,\n", + " dataloader_kwargs=dataloader_kwargs,\n", " **trainer_kwargs)\n", " \n", " self.input_size = input_size\n", diff --git a/nbs/models.rnn.ipynb b/nbs/models.rnn.ipynb index 7da5c274d..6fda8deae 100644 --- a/nbs/models.rnn.ipynb +++ b/nbs/models.rnn.ipynb @@ -123,14 +123,12 @@ " `valid_batch_size`: int=None, number of different series in each validation and test batch.
\n", " `scaler_type`: str='robust', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", " `random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
\n", - " `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", - " 'prefetch_factor': int=None, number of batches to be prefetched by the worker.
\n", " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", - " `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
\n", " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n", " `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
\n", " `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
\n", + " `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
\n", " `alias`: str, optional, Custom name of the model.
\n", "\n", " `**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
\n", @@ -167,14 +165,12 @@ " valid_batch_size: Optional[int] = None,\n", " scaler_type: str='robust',\n", " random_seed=1,\n", - " num_workers_loader=0,\n", - " prefetch_factor: Optional[int] = None,\n", " drop_last_loader=False,\n", - " pin_memory=False,\n", " optimizer=None,\n", " optimizer_kwargs=None,\n", " lr_scheduler = None,\n", - " lr_scheduler_kwargs = None, \n", + " lr_scheduler_kwargs = None, \n", + " dataloader_kwargs = None, \n", " **trainer_kwargs):\n", " super(RNN, self).__init__(\n", " h=h,\n", @@ -193,15 +189,13 @@ " futr_exog_list=futr_exog_list,\n", " hist_exog_list=hist_exog_list,\n", " stat_exog_list=stat_exog_list,\n", - " num_workers_loader=num_workers_loader,\n", - " prefetch_factor=prefetch_factor,\n", " drop_last_loader=drop_last_loader,\n", - " pin_memory=pin_memory,\n", " random_seed=random_seed,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", " lr_scheduler=lr_scheduler,\n", " lr_scheduler_kwargs=lr_scheduler_kwargs,\n", + " dataloader_kwargs=dataloader_kwargs,\n", " **trainer_kwargs\n", " )\n", "\n", diff --git a/nbs/models.softs.ipynb b/nbs/models.softs.ipynb index 9e0603a9a..8738b216f 100644 --- a/nbs/models.softs.ipynb +++ b/nbs/models.softs.ipynb @@ -168,10 +168,6 @@ "outputs": [], "source": [ "#| export\n", - "\n", - "from typing import Optional\n", - "\n", - "\n", "class SOFTS(BaseMultivariate):\n", "\n", " \"\"\" SOFTS\n", @@ -200,15 +196,13 @@ " `step_size`: int=1, step size between each window of temporal data.
\n", " `scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", " `random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
\n", - " `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", - " 'prefetch_factor': int=None, number of batches to be prefetched by the worker.
\n", " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", - " `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
\n", " `alias`: str, optional, Custom name of the model.
\n", " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n", " `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
\n", " `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
\n", + " `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
\n", " `**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
\n", " \n", " **References**
\n", @@ -245,14 +239,12 @@ " step_size: int = 1,\n", " scaler_type: str = 'identity',\n", " random_seed: int = 1,\n", - " num_workers_loader: int = 0,\n", - " prefetch_factor: Optional[int] = None,\n", " drop_last_loader: bool = False,\n", - " pin_memory: bool = False,\n", " optimizer = None,\n", " optimizer_kwargs = None,\n", " lr_scheduler = None,\n", - " lr_scheduler_kwargs = None, \n", + " lr_scheduler_kwargs = None, \n", + " dataloader_kwargs = None, \n", " **trainer_kwargs):\n", " \n", " super(SOFTS, self).__init__(h=h,\n", @@ -272,14 +264,12 @@ " step_size=step_size,\n", " scaler_type=scaler_type,\n", " random_seed=random_seed,\n", - " num_workers_loader=num_workers_loader,\n", - " prefetch_factor=prefetch_factor,\n", " drop_last_loader=drop_last_loader,\n", - " pin_memory=pin_memory,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", " lr_scheduler=lr_scheduler,\n", " lr_scheduler_kwargs=lr_scheduler_kwargs,\n", + " dataloader_kwargs=dataloader_kwargs,\n", " **trainer_kwargs)\n", " \n", " self.h = h\n", diff --git a/nbs/models.stemgnn.ipynb b/nbs/models.stemgnn.ipynb index 3e2bf0ec5..357a6985f 100644 --- a/nbs/models.stemgnn.ipynb +++ b/nbs/models.stemgnn.ipynb @@ -171,9 +171,6 @@ "outputs": [], "source": [ "#| export\n", - "from typing import Optional\n", - "\n", - "\n", "class StemGNN(BaseMultivariate):\n", " \"\"\" StemGNN\n", "\n", @@ -204,15 +201,13 @@ " `step_size`: int=1, step size between each window of temporal data.
\n", " `scaler_type`: str='robust', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", " `random_seed`: int, random_seed for pytorch initializer and numpy generators.
\n", - " `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", - " 'prefetch_factor': int=None, number of batches to be prefetched by the worker.
\n", " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", - " `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
\n", " `alias`: str, optional, Custom name of the model.
\n", " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n", " `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
\n", " `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
\n", + " `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
\n", " `**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
\n", " \"\"\"\n", " # Class attributes\n", @@ -243,14 +238,12 @@ " step_size: int = 1,\n", " scaler_type: str = 'robust',\n", " random_seed: int = 1,\n", - " num_workers_loader = 0,\n", - " prefetch_factor = None,\n", " drop_last_loader = False,\n", - " pin_memory = False,\n", " optimizer = None,\n", " optimizer_kwargs = None,\n", " lr_scheduler = None,\n", " lr_scheduler_kwargs = None,\n", + " dataloader_kwargs = None,\n", " **trainer_kwargs):\n", "\n", " # Inherit BaseMultivariate class\n", @@ -270,15 +263,13 @@ " batch_size=batch_size,\n", " step_size=step_size,\n", " scaler_type=scaler_type,\n", - " num_workers_loader=num_workers_loader,\n", - " prefetch_factor=prefetch_factor,\n", " drop_last_loader=drop_last_loader,\n", - " pin_memory=pin_memory,\n", " random_seed=random_seed,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", " lr_scheduler=lr_scheduler,\n", " lr_scheduler_kwargs=lr_scheduler_kwargs,\n", + " dataloader_kwargs=dataloader_kwargs,\n", " **trainer_kwargs)\n", " # Quick fix for now, fix the model later.\n", " if n_stacks != 2:\n", diff --git a/nbs/models.tcn.ipynb b/nbs/models.tcn.ipynb index cbfc3cd01..ffa490066 100644 --- a/nbs/models.tcn.ipynb +++ b/nbs/models.tcn.ipynb @@ -123,15 +123,13 @@ " `val_check_steps`: int=100, Number of training steps between every validation loss check.
`batch_size`: int=32, number of differentseries in each batch.
\n", " `scaler_type`: str='robust', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", " `random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
\n", - " `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", - " 'prefetch_factor': int=None, number of batches to be prefetched by the worker.
\n", " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", - " `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
\n", " `alias`: str, optional, Custom name of the model.
\n", " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n", " `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
\n", " `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
\n", + " `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
\n", " `**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
\n", " \"\"\"\n", " # Class attributes\n", @@ -165,14 +163,12 @@ " valid_batch_size: Optional[int] = None,\n", " scaler_type: str ='robust',\n", " random_seed: int = 1,\n", - " num_workers_loader = 0,\n", - " prefetch_factor: Optional[int] = None,\n", " drop_last_loader = False,\n", - " pin_memory = False,\n", " optimizer = None,\n", " optimizer_kwargs = None,\n", " lr_scheduler = None,\n", - " lr_scheduler_kwargs = None, \n", + " lr_scheduler_kwargs = None, \n", + " dataloader_kwargs = None, \n", " **trainer_kwargs):\n", " super(TCN, self).__init__(\n", " h=h,\n", @@ -191,15 +187,13 @@ " futr_exog_list=futr_exog_list,\n", " hist_exog_list=hist_exog_list,\n", " stat_exog_list=stat_exog_list,\n", - " num_workers_loader=num_workers_loader,\n", - " prefetch_factor=prefetch_factor,\n", " drop_last_loader=drop_last_loader,\n", - " pin_memory=pin_memory,\n", " random_seed=random_seed,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", " lr_scheduler=lr_scheduler,\n", " lr_scheduler_kwargs=lr_scheduler_kwargs,\n", + " dataloader_kwargs = dataloader_kwargs,\n", " **trainer_kwargs\n", " )\n", "\n", diff --git a/nbs/models.tft.ipynb b/nbs/models.tft.ipynb index cd9591810..6e313bb1a 100644 --- a/nbs/models.tft.ipynb +++ b/nbs/models.tft.ipynb @@ -693,15 +693,13 @@ " `step_size`: int=1, step size between each window of temporal data.
\n", " `scaler_type`: str='robust', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", " `random_seed`: int, random seed initialization for replicability.
\n", - " `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", - " 'prefetch_factor': int=None, number of batches to be prefetched by the worker.
\n", " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", - " `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
\n", " `alias`: str, optional, Custom name of the model.
\n", " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n", " `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
\n", " `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
\n", + " `dataloader_kwargs`: dict, optional, dictionary of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.<br>
\n", " `**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
\n", "\n", " **References:**
\n", @@ -742,15 +740,13 @@ " start_padding_enabled=False,\n", " step_size: int = 1,\n", " scaler_type: str = \"robust\",\n", - " num_workers_loader=0,\n", - " prefetch_factor: Optional[int] = None,\n", " drop_last_loader=False,\n", - " pin_memory=False,\n", " random_seed: int = 1,\n", " optimizer=None,\n", " optimizer_kwargs=None,\n", " lr_scheduler=None,\n", " lr_scheduler_kwargs=None,\n", + " dataloader_kwargs = None,\n", " **trainer_kwargs,\n", " ):\n", "\n", @@ -775,15 +771,13 @@ " start_padding_enabled=start_padding_enabled,\n", " step_size=step_size,\n", " scaler_type=scaler_type,\n", - " num_workers_loader=num_workers_loader,\n", - " prefetch_factor=prefetch_factor,\n", " drop_last_loader=drop_last_loader,\n", - " pin_memory=pin_memory,\n", " random_seed=random_seed,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", " lr_scheduler=lr_scheduler,\n", " lr_scheduler_kwargs=lr_scheduler_kwargs,\n", + " dataloader_kwargs=dataloader_kwargs,\n", " **trainer_kwargs,\n", " )\n", " self.example_length = input_size + h\n", diff --git a/nbs/models.tide.ipynb b/nbs/models.tide.ipynb index 2fe4f4162..3b096a26f 100644 --- a/nbs/models.tide.ipynb +++ b/nbs/models.tide.ipynb @@ -164,15 +164,13 @@ " `step_size`: int=1, step size between each window of temporal data.
\n", " `scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", " `random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
\n", - " `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", - " 'prefetch_factor': int=None, number of batches to be prefetched by the worker.
\n", " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", - " `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
\n", " `alias`: str, optional, Custom name of the model.
\n", " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n", " `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
\n", " `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
\n", + " `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
\n", " `**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
\n", "\n", " **References:**
\n", @@ -215,14 +213,12 @@ " step_size: int = 1,\n", " scaler_type: str = 'identity',\n", " random_seed: int = 1,\n", - " num_workers_loader: int = 0,\n", - " prefetch_factor: Optional[int] = None,\n", " drop_last_loader: bool = False,\n", - " pin_memory: bool = False,\n", " optimizer = None,\n", " optimizer_kwargs = None,\n", " lr_scheduler = None,\n", " lr_scheduler_kwargs = None,\n", + " dataloader_kwargs = None,\n", " **trainer_kwargs):\n", "\n", " # Inherit BaseWindows class\n", @@ -248,14 +244,12 @@ " step_size=step_size,\n", " scaler_type=scaler_type,\n", " random_seed=random_seed,\n", - " num_workers_loader=num_workers_loader,\n", - " prefetch_factor=prefetch_factor,\n", " drop_last_loader=drop_last_loader,\n", - " pin_memory=pin_memory,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", " lr_scheduler=lr_scheduler,\n", " lr_scheduler_kwargs=lr_scheduler_kwargs,\n", + " dataloader_kwargs=dataloader_kwargs,\n", " **trainer_kwargs\n", " ) \n", " self.h = h\n", diff --git a/nbs/models.timellm.ipynb b/nbs/models.timellm.ipynb index 6cb6562a4..812515882 100755 --- a/nbs/models.timellm.ipynb +++ b/nbs/models.timellm.ipynb @@ -288,15 +288,13 @@ " `step_size`: int=1, step size between each window of temporal data.
\n", " `scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", " `random_seed`: int, random_seed for pytorch initializer and numpy generators.
\n", - " `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", - " 'prefetch_factor': int=None, number of batches to be prefetched by the worker.
\n", " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", - " `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
\n", " `alias`: str, optional, Custom name of the model.
\n", " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n", " `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
\n", " `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
\n", + " `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
\n", " `**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
\n", "\n", " **References:**
\n", @@ -346,15 +344,13 @@ " num_lr_decays: int = 0,\n", " early_stop_patience_steps: int = -1,\n", " scaler_type: str = 'identity',\n", - " num_workers_loader: int = 0,\n", - " prefetch_factor: Optional[int] = None,\n", " drop_last_loader: bool = False,\n", - " pin_memory: bool = False,\n", " random_seed: int = 1,\n", " optimizer = None,\n", " optimizer_kwargs = None,\n", " lr_scheduler = None,\n", " lr_scheduler_kwargs = None,\n", + " dataloader_kwargs = None,\n", " **trainer_kwargs):\n", " super(TimeLLM, self).__init__(h=h,\n", " input_size=input_size,\n", @@ -375,15 +371,13 @@ " start_padding_enabled=start_padding_enabled,\n", " step_size=step_size,\n", " scaler_type=scaler_type,\n", - " num_workers_loader=num_workers_loader,\n", - " prefetch_factor=prefetch_factor,\n", " drop_last_loader=drop_last_loader,\n", - " pin_memory=pin_memory,\n", " random_seed=random_seed,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", " lr_scheduler=lr_scheduler,\n", " lr_scheduler_kwargs=lr_scheduler_kwargs,\n", + " dataloader_kwargs=dataloader_kwargs,\n", " **trainer_kwargs)\n", " \n", " # Architecture\n", diff --git a/nbs/models.timemixer.ipynb b/nbs/models.timemixer.ipynb index f5779bcff..485d971eb 100644 --- a/nbs/models.timemixer.ipynb +++ b/nbs/models.timemixer.ipynb @@ -323,10 +323,6 @@ "outputs": [], "source": [ "#| export\n", - "\n", - "from typing import Optional\n", - "\n", - "\n", "class TimeMixer(BaseMultivariate):\n", " \"\"\" TimeMixer\n", " **Parameters**
\n", @@ -360,15 +356,13 @@ " `step_size`: int=1, step size between each window of temporal data.
\n", " `scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", " `random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
\n", - " `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", - " 'prefetch_factor': int=None, number of batches to be prefetched by the worker.
\n", " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", - " `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
\n", " `alias`: str, optional, Custom name of the model.
\n", " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n", " `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
\n", " `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
\n", + " `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
\n", " `**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
\n", "\n", " **References**
\n", @@ -412,14 +406,12 @@ " step_size: int = 1,\n", " scaler_type: str = 'identity',\n", " random_seed: int = 1,\n", - " num_workers_loader: int = 0,\n", - " prefetch_factor: Optional[int] = None,\n", " drop_last_loader: bool = False,\n", - " pin_memory: bool = False,\n", " optimizer = None,\n", " optimizer_kwargs = None,\n", " lr_scheduler = None,\n", - " lr_scheduler_kwargs = None, \n", + " lr_scheduler_kwargs = None, \n", + " dataloader_kwargs = None, \n", " **trainer_kwargs):\n", " \n", " super(TimeMixer, self).__init__(h=h,\n", @@ -439,14 +431,12 @@ " step_size=step_size,\n", " scaler_type=scaler_type,\n", " random_seed=random_seed,\n", - " num_workers_loader=num_workers_loader,\n", - " prefetch_factor=prefetch_factor,\n", " drop_last_loader=drop_last_loader,\n", - " pin_memory=pin_memory,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", " lr_scheduler=lr_scheduler,\n", " lr_scheduler_kwargs=lr_scheduler_kwargs,\n", + " dataloader_kwargs=dataloader_kwargs,\n", " **trainer_kwargs)\n", " \n", " self.label_len = int(np.ceil(input_size * decoder_input_size_multiplier))\n", diff --git a/nbs/models.timesnet.ipynb b/nbs/models.timesnet.ipynb index 5a06ca67c..bc85e7126 100644 --- a/nbs/models.timesnet.ipynb +++ b/nbs/models.timesnet.ipynb @@ -259,20 +259,16 @@ " Type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", " random_seed : int (default=1)\n", " Random_seed for pytorch initializer and numpy generators.\n", - " num_workers_loader : int (default=0)\n", - " Workers to be used by `TimeSeriesDataLoader`.\n", - " 'prefetch_factor': int (default=None) \n", - " Number of batches to be prefetched by the worker.\n", " drop_last_loader : bool (default=False)\n", " If True `TimeSeriesDataLoader` drops last non-full batch.\n", - " `pin_memory`: bool (default=False) \n", - " If True `TimeSeriesDataLoader` uses pinned memory.\n", " `optimizer`: Subclass of 'torch.optim.Optimizer', optional (default=None)\n", " User specified optimizer instead of the default choice (Adam).\n", " `optimizer_kwargs`: dict, optional (defualt=None)\n", " List of parameters used by the user specified `optimizer`.\n", " `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
\n", " `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
\n", + " `dataloader_kwargs`: dict, optional (default=None)\n", + " Dictionary of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.<br>
\n", " **trainer_kwargs\n", " Keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer)\n", "\n", @@ -314,14 +310,12 @@ " step_size: int = 1,\n", " scaler_type: str = 'standard',\n", " random_seed: int = 1,\n", - " num_workers_loader: int = 0,\n", - " prefetch_factor: Optional[int] = None,\n", " drop_last_loader: bool = False,\n", - " pin_memory: bool = False,\n", " optimizer = None,\n", " optimizer_kwargs = None,\n", " lr_scheduler = None,\n", - " lr_scheduler_kwargs = None, \n", + " lr_scheduler_kwargs = None, \n", + " dataloader_kwargs = None, \n", " **trainer_kwargs):\n", " super(TimesNet, self).__init__(h=h,\n", " input_size=input_size,\n", @@ -343,15 +337,13 @@ " start_padding_enabled = start_padding_enabled,\n", " step_size=step_size,\n", " scaler_type=scaler_type,\n", - " num_workers_loader=num_workers_loader,\n", - " prefetch_factor=prefetch_factor,\n", " drop_last_loader=drop_last_loader,\n", - " pin_memory=pin_memory,\n", " random_seed=random_seed,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", " lr_scheduler=lr_scheduler,\n", - " lr_scheduler_kwargs=lr_scheduler_kwargs, \n", + " lr_scheduler_kwargs=lr_scheduler_kwargs, \n", + " dataloader_kwargs=dataloader_kwargs, \n", " **trainer_kwargs)\n", "\n", " # Architecture\n", diff --git a/nbs/models.tsmixer.ipynb b/nbs/models.tsmixer.ipynb index 324c9c8fd..55080cad9 100644 --- a/nbs/models.tsmixer.ipynb +++ b/nbs/models.tsmixer.ipynb @@ -220,9 +220,6 @@ "outputs": [], "source": [ "#| export\n", - "from typing import Optional\n", - "\n", - "\n", "class TSMixer(BaseMultivariate):\n", " \"\"\" TSMixer\n", "\n", @@ -250,15 +247,13 @@ " `step_size`: int=1, step size between each window of temporal data.
\n", " `scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", " `random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
\n", - " `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", - " 'prefetch_factor': int=None, number of batches to be prefetched by the worker.
\n", " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", - " `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
\n", " `alias`: str, optional, Custom name of the model.
\n", " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n", " `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
\n", " `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
\n", + " `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
\n", " `**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
\n", "\n", " **References:**
\n", @@ -293,14 +288,12 @@ " step_size: int = 1,\n", " scaler_type: str = 'identity',\n", " random_seed: int = 1,\n", - " num_workers_loader: int = 0,\n", - " prefetch_factor: Optional[int] = None,\n", " drop_last_loader: bool = False,\n", - " pin_memory: bool = False,\n", " optimizer = None,\n", " optimizer_kwargs = None,\n", " lr_scheduler = None,\n", " lr_scheduler_kwargs = None,\n", + " dataloader_kwargs = None,\n", " **trainer_kwargs):\n", "\n", " # Inherit BaseMultivariate class\n", @@ -321,14 +314,12 @@ " step_size=step_size,\n", " scaler_type=scaler_type,\n", " random_seed=random_seed,\n", - " num_workers_loader=num_workers_loader,\n", - " prefetch_factor=prefetch_factor,\n", " drop_last_loader=drop_last_loader,\n", - " pin_memory=pin_memory,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", " lr_scheduler=lr_scheduler,\n", " lr_scheduler_kwargs=lr_scheduler_kwargs,\n", + " dataloader_kwargs=dataloader_kwargs,\n", " **trainer_kwargs)\n", "\n", " # Reversible InstanceNormalization layer\n", diff --git a/nbs/models.tsmixerx.ipynb b/nbs/models.tsmixerx.ipynb index ca4dd0269..74ba735eb 100644 --- a/nbs/models.tsmixerx.ipynb +++ b/nbs/models.tsmixerx.ipynb @@ -244,9 +244,6 @@ "outputs": [], "source": [ "#| export\n", - "from typing import Optional\n", - "\n", - "\n", "class TSMixerx(BaseMultivariate):\n", " \"\"\" TSMixerx\n", "\n", @@ -274,15 +271,13 @@ " `step_size`: int=1, step size between each window of temporal data.
\n", " `scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", " `random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
\n", - " `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", - " 'prefetch_factor': int=None, number of batches to be prefetched by the worker.
\n", " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", - " `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
\n", " `alias`: str, optional, Custom name of the model.
\n", " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n", " `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
\n", " `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
\n", + " `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
\n", " `**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
\n", "\n", " **References:**
\n", @@ -317,14 +312,12 @@ " step_size: int = 1,\n", " scaler_type: str = 'identity',\n", " random_seed: int = 1,\n", - " num_workers_loader: int = 0,\n", - " prefetch_factor: Optional[int] = None,\n", " drop_last_loader: bool = False,\n", - " pin_memory: bool = False,\n", " optimizer = None,\n", " optimizer_kwargs = None,\n", " lr_scheduler = None,\n", " lr_scheduler_kwargs = None,\n", + " dataloader_kwargs = None,\n", " **trainer_kwargs):\n", "\n", " # Inherit BaseMultvariate class\n", @@ -345,14 +338,12 @@ " step_size=step_size,\n", " scaler_type=scaler_type,\n", " random_seed=random_seed,\n", - " num_workers_loader=num_workers_loader,\n", - " prefetch_factor=prefetch_factor,\n", " drop_last_loader=drop_last_loader,\n", - " pin_memory=pin_memory,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", " lr_scheduler=lr_scheduler,\n", " lr_scheduler_kwargs=lr_scheduler_kwargs,\n", + " dataloader_kwargs=dataloader_kwargs,\n", " **trainer_kwargs)\n", " # Reversible InstanceNormalization layer\n", " self.revin = revin\n", diff --git a/nbs/models.vanillatransformer.ipynb b/nbs/models.vanillatransformer.ipynb index 75df8bcbd..232de7dfa 100644 --- a/nbs/models.vanillatransformer.ipynb +++ b/nbs/models.vanillatransformer.ipynb @@ -195,15 +195,13 @@ " `start_padding_enabled`: bool=False, if True, the model will pad the time series with zeros at the beginning, by input size.
\n", " `scaler_type`: str='robust', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", " `random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
\n", - " `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", - " 'prefetch_factor': int=None, number of batches to be prefetched by the worker.
\n", " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", - " `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
\n", " `alias`: str, optional, Custom name of the model.
\n", " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n", " `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
\n", " `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
\n", + " `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
\n", " `**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
\n", "\n", "\t*References*
\n", @@ -244,14 +242,12 @@ " step_size: int = 1,\n", " scaler_type: str = 'identity',\n", " random_seed: int = 1,\n", - " num_workers_loader: int = 0,\n", - " prefetch_factor: Optional[int] = None,\n", " drop_last_loader: bool = False,\n", - " pin_memory: bool = False,\n", " optimizer = None,\n", " optimizer_kwargs = None,\n", " lr_scheduler = None,\n", " lr_scheduler_kwargs = None,\n", + " dataloader_kwargs = None,\n", " **trainer_kwargs):\n", " super(VanillaTransformer, self).__init__(h=h,\n", " input_size=input_size,\n", @@ -272,15 +268,13 @@ " start_padding_enabled=start_padding_enabled,\n", " step_size=step_size,\n", " scaler_type=scaler_type,\n", - " num_workers_loader=num_workers_loader,\n", - " prefetch_factor=prefetch_factor,\n", " drop_last_loader=drop_last_loader,\n", - " pin_memory=pin_memory,\n", " random_seed=random_seed,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", " lr_scheduler=lr_scheduler,\n", " lr_scheduler_kwargs=lr_scheduler_kwargs,\n", + " dataloader_kwargs=dataloader_kwargs,\n", " **trainer_kwargs)\n", "\n", " # Architecture\n", diff --git a/nbs/tsdataset.ipynb b/nbs/tsdataset.ipynb index 6c7964a25..9ec711b79 100644 --- a/nbs/tsdataset.ipynb +++ b/nbs/tsdataset.ipynb @@ -645,31 +645,25 @@ " dataset: BaseTimeSeriesDataset,\n", " batch_size=32, \n", " valid_batch_size=1024,\n", - " num_workers=0,\n", - " prefetch_factor=None,\n", " drop_last=False,\n", " shuffle_train=True,\n", - " pin_memory=False\n", + " **dataloaders_kwargs\n", " ):\n", " super().__init__()\n", " self.dataset = dataset\n", " self.batch_size = batch_size\n", " self.valid_batch_size = valid_batch_size\n", - " self.num_workers = num_workers\n", - " self.prefetch_factor=prefetch_factor\n", " self.drop_last = drop_last\n", " self.shuffle_train = shuffle_train\n", - " self.pin_memory = pin_memory\n", + " self.dataloaders_kwargs = dataloaders_kwargs\n", " \n", " def train_dataloader(self):\n", " loader = TimeSeriesLoader(\n", " self.dataset,\n", " 
batch_size=self.batch_size, \n", - " num_workers=self.num_workers,\n", " shuffle=self.shuffle_train,\n", " drop_last=self.drop_last,\n", - " pin_memory=self.pin_memory,\n", - " prefetch_factor=self.prefetch_factor\n", + " **self.dataloaders_kwargs\n", " )\n", " return loader\n", " \n", @@ -677,11 +671,9 @@ " loader = TimeSeriesLoader(\n", " self.dataset, \n", " batch_size=self.valid_batch_size, \n", - " num_workers=self.num_workers,\n", " shuffle=False,\n", " drop_last=self.drop_last,\n", - " pin_memory=self.pin_memory,\n", - " prefetch_factor=self.prefetch_factor\n", + " **self.dataloaders_kwargs\n", " )\n", " return loader\n", " \n", @@ -689,10 +681,8 @@ " loader = TimeSeriesLoader(\n", " self.dataset,\n", " batch_size=self.valid_batch_size, \n", - " num_workers=self.num_workers,\n", " shuffle=False,\n", - " pin_memory=self.pin_memory,\n", - " prefetch_factor=self.prefetch_factor\n", + " **self.dataloaders_kwargs\n", " )\n", " return loader" ] @@ -956,17 +946,17 @@ " dataset: _FilesDataset,\n", " batch_size=32,\n", " valid_batch_size=1024,\n", - " num_workers=0,\n", " drop_last=False,\n", " shuffle_train=True,\n", + " **dataloaders_kwargs\n", " ):\n", " super(TimeSeriesDataModule, self).__init__()\n", " self.files_ds = dataset\n", " self.batch_size = batch_size\n", " self.valid_batch_size = valid_batch_size\n", - " self.num_workers = num_workers\n", " self.drop_last = drop_last\n", " self.shuffle_train = shuffle_train\n", + " self.dataloaders_kwargs = dataloaders_kwargs\n", "\n", " def setup(self, stage):\n", " import torch.distributed as dist\n", diff --git a/neuralforecast/_modidx.py b/neuralforecast/_modidx.py index 1d9130429..25f008ce4 100644 --- a/neuralforecast/_modidx.py +++ b/neuralforecast/_modidx.py @@ -506,7 +506,60 @@ 'neuralforecast/losses/pytorch.py'), 'neuralforecast.losses.pytorch.weighted_average': ( 'losses.pytorch.html#weighted_average', 'neuralforecast/losses/pytorch.py')}, - 'neuralforecast.models.autoformer': {}, + 
'neuralforecast.models.autoformer': { 'neuralforecast.models.autoformer.AutoCorrelation': ( 'models.autoformer.html#autocorrelation', + 'neuralforecast/models/autoformer.py'), + 'neuralforecast.models.autoformer.AutoCorrelation.__init__': ( 'models.autoformer.html#autocorrelation.__init__', + 'neuralforecast/models/autoformer.py'), + 'neuralforecast.models.autoformer.AutoCorrelation.forward': ( 'models.autoformer.html#autocorrelation.forward', + 'neuralforecast/models/autoformer.py'), + 'neuralforecast.models.autoformer.AutoCorrelation.time_delay_agg_full': ( 'models.autoformer.html#autocorrelation.time_delay_agg_full', + 'neuralforecast/models/autoformer.py'), + 'neuralforecast.models.autoformer.AutoCorrelation.time_delay_agg_inference': ( 'models.autoformer.html#autocorrelation.time_delay_agg_inference', + 'neuralforecast/models/autoformer.py'), + 'neuralforecast.models.autoformer.AutoCorrelation.time_delay_agg_training': ( 'models.autoformer.html#autocorrelation.time_delay_agg_training', + 'neuralforecast/models/autoformer.py'), + 'neuralforecast.models.autoformer.AutoCorrelationLayer': ( 'models.autoformer.html#autocorrelationlayer', + 'neuralforecast/models/autoformer.py'), + 'neuralforecast.models.autoformer.AutoCorrelationLayer.__init__': ( 'models.autoformer.html#autocorrelationlayer.__init__', + 'neuralforecast/models/autoformer.py'), + 'neuralforecast.models.autoformer.AutoCorrelationLayer.forward': ( 'models.autoformer.html#autocorrelationlayer.forward', + 'neuralforecast/models/autoformer.py'), + 'neuralforecast.models.autoformer.Autoformer': ( 'models.autoformer.html#autoformer', + 'neuralforecast/models/autoformer.py'), + 'neuralforecast.models.autoformer.Autoformer.__init__': ( 'models.autoformer.html#autoformer.__init__', + 'neuralforecast/models/autoformer.py'), + 'neuralforecast.models.autoformer.Autoformer.forward': ( 'models.autoformer.html#autoformer.forward', + 'neuralforecast/models/autoformer.py'), + 
'neuralforecast.models.autoformer.Decoder': ( 'models.autoformer.html#decoder', + 'neuralforecast/models/autoformer.py'), + 'neuralforecast.models.autoformer.Decoder.__init__': ( 'models.autoformer.html#decoder.__init__', + 'neuralforecast/models/autoformer.py'), + 'neuralforecast.models.autoformer.Decoder.forward': ( 'models.autoformer.html#decoder.forward', + 'neuralforecast/models/autoformer.py'), + 'neuralforecast.models.autoformer.DecoderLayer': ( 'models.autoformer.html#decoderlayer', + 'neuralforecast/models/autoformer.py'), + 'neuralforecast.models.autoformer.DecoderLayer.__init__': ( 'models.autoformer.html#decoderlayer.__init__', + 'neuralforecast/models/autoformer.py'), + 'neuralforecast.models.autoformer.DecoderLayer.forward': ( 'models.autoformer.html#decoderlayer.forward', + 'neuralforecast/models/autoformer.py'), + 'neuralforecast.models.autoformer.Encoder': ( 'models.autoformer.html#encoder', + 'neuralforecast/models/autoformer.py'), + 'neuralforecast.models.autoformer.Encoder.__init__': ( 'models.autoformer.html#encoder.__init__', + 'neuralforecast/models/autoformer.py'), + 'neuralforecast.models.autoformer.Encoder.forward': ( 'models.autoformer.html#encoder.forward', + 'neuralforecast/models/autoformer.py'), + 'neuralforecast.models.autoformer.EncoderLayer': ( 'models.autoformer.html#encoderlayer', + 'neuralforecast/models/autoformer.py'), + 'neuralforecast.models.autoformer.EncoderLayer.__init__': ( 'models.autoformer.html#encoderlayer.__init__', + 'neuralforecast/models/autoformer.py'), + 'neuralforecast.models.autoformer.EncoderLayer.forward': ( 'models.autoformer.html#encoderlayer.forward', + 'neuralforecast/models/autoformer.py'), + 'neuralforecast.models.autoformer.LayerNorm': ( 'models.autoformer.html#layernorm', + 'neuralforecast/models/autoformer.py'), + 'neuralforecast.models.autoformer.LayerNorm.__init__': ( 'models.autoformer.html#layernorm.__init__', + 'neuralforecast/models/autoformer.py'), + 
'neuralforecast.models.autoformer.LayerNorm.forward': ( 'models.autoformer.html#layernorm.forward', + 'neuralforecast/models/autoformer.py')}, 'neuralforecast.models.bitcn': { 'neuralforecast.models.bitcn.BiTCN': ( 'models.bitcn.html#bitcn', 'neuralforecast/models/bitcn.py'), 'neuralforecast.models.bitcn.BiTCN.__init__': ( 'models.bitcn.html#bitcn.__init__', diff --git a/neuralforecast/common/_base_model.py b/neuralforecast/common/_base_model.py index ab640cb7e..b17a90efa 100644 --- a/neuralforecast/common/_base_model.py +++ b/neuralforecast/common/_base_model.py @@ -336,11 +336,9 @@ def _fit( dataset=dataset, batch_size=batch_size, valid_batch_size=valid_batch_size, - num_workers=self.num_workers_loader, - prefetch_factor=self.prefetch_factor, drop_last=self.drop_last_loader, shuffle_train=shuffle_train, - pin_memory=self.pin_memory, + **self.dataloader_kwargs, ) if self.val_check_steps > self.max_steps: diff --git a/neuralforecast/common/_base_multivariate.py b/neuralforecast/common/_base_multivariate.py index e068ade73..a1f8a51ac 100644 --- a/neuralforecast/common/_base_multivariate.py +++ b/neuralforecast/common/_base_multivariate.py @@ -46,16 +46,14 @@ def __init__( futr_exog_list=None, hist_exog_list=None, stat_exog_list=None, - num_workers_loader=0, - prefetch_factor=None, drop_last_loader=False, - pin_memory=False, random_seed=1, alias=None, optimizer=None, optimizer_kwargs=None, lr_scheduler=None, lr_scheduler_kwargs=None, + dataloader_kwargs=None, **trainer_kwargs, ): super().__init__( @@ -126,10 +124,8 @@ def __init__( self.decompose_forecast = False # DataModule arguments - self.num_workers_loader = num_workers_loader - self.prefetch_factor = prefetch_factor + self.dataloader_kwargs = dataloader_kwargs self.drop_last_loader = drop_last_loader - self.pin_memory = pin_memory # used by on_validation_epoch_end hook self.validation_step_outputs = [] self.alias = alias diff --git a/neuralforecast/common/_base_recurrent.py 
b/neuralforecast/common/_base_recurrent.py index 2de61d8d3..48be3fa2a 100644 --- a/neuralforecast/common/_base_recurrent.py +++ b/neuralforecast/common/_base_recurrent.py @@ -46,16 +46,14 @@ def __init__( futr_exog_list=None, hist_exog_list=None, stat_exog_list=None, - num_workers_loader=0, - prefetch_factor=None, drop_last_loader=False, - pin_memory=False, random_seed=1, alias=None, optimizer=None, optimizer_kwargs=None, lr_scheduler=None, lr_scheduler_kwargs=None, + dataloader_kwargs=None, **trainer_kwargs, ): super().__init__( @@ -119,10 +117,8 @@ def __init__( self.test_size = 0 # DataModule arguments - self.num_workers_loader = num_workers_loader - self.prefetch_factor = prefetch_factor + self.dataloader_kwargs = dataloader_kwargs self.drop_last_loader = drop_last_loader - self.pin_memory = pin_memory # used by on_validation_epoch_end hook self.validation_step_outputs = [] self.alias = alias @@ -577,10 +573,7 @@ def predict(self, dataset, step_size=1, random_seed=None, **data_module_kwargs): datamodule = TimeSeriesDataModule( dataset=dataset, valid_batch_size=self.valid_batch_size, - num_workers=self.num_workers_loader, - prefetch_factor=self.prefetch_factor, - pin_memory=self.pin_memory, - **data_module_kwargs, + **self.dataloader_kwargs, ) fcsts = trainer.predict(self, datamodule=datamodule) if self.test_size > 0: diff --git a/neuralforecast/common/_base_windows.py b/neuralforecast/common/_base_windows.py index 1325d61a3..74a81b95a 100644 --- a/neuralforecast/common/_base_windows.py +++ b/neuralforecast/common/_base_windows.py @@ -49,16 +49,14 @@ def __init__( hist_exog_list=None, stat_exog_list=None, exclude_insample_y=False, - num_workers_loader=0, - prefetch_factor=None, drop_last_loader=False, - pin_memory=False, random_seed=1, alias=None, optimizer=None, optimizer_kwargs=None, lr_scheduler=None, lr_scheduler_kwargs=None, + dataloader_kwargs=None, **trainer_kwargs, ): super().__init__( @@ -130,10 +128,8 @@ def __init__( self.decompose_forecast = False # 
DataModule arguments - self.num_workers_loader = num_workers_loader - self.prefetch_factor = prefetch_factor + self.dataloader_kwargs = dataloader_kwargs self.drop_last_loader = drop_last_loader - self.pin_memory = pin_memory # used by on_validation_epoch_end hook self.validation_step_outputs = [] self.alias = alias diff --git a/neuralforecast/models/autoformer.py b/neuralforecast/models/autoformer.py index e83933051..069e3641d 100644 --- a/neuralforecast/models/autoformer.py +++ b/neuralforecast/models/autoformer.py @@ -439,15 +439,13 @@ class Autoformer(BaseWindows): `start_padding_enabled`: bool=False, if True, the model will pad the time series with zeros at the beginning, by input size.
`scaler_type`: str='robust', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
`random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
- `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
- 'prefetch_factor': int=None, number of batches to be prefetched by the worker.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
- `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
`alias`: str, optional, Custom name of the model.
`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
`optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
`lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
`lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
+ `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
`**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
*References*
@@ -493,14 +491,12 @@ def __init__( step_size: int = 1, scaler_type: str = "identity", random_seed: int = 1, - num_workers_loader: int = 0, - prefetch_factor: Optional[int] = None, drop_last_loader: bool = False, - pin_memory: bool = False, optimizer=None, optimizer_kwargs=None, lr_scheduler=None, lr_scheduler_kwargs=None, + dataloader_kwargs=None, **trainer_kwargs, ): super(Autoformer, self).__init__( @@ -524,15 +520,13 @@ def __init__( start_padding_enabled=start_padding_enabled, step_size=step_size, scaler_type=scaler_type, - num_workers_loader=num_workers_loader, - prefetch_factor=prefetch_factor, drop_last_loader=drop_last_loader, - pin_memory=pin_memory, random_seed=random_seed, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, lr_scheduler=lr_scheduler, lr_scheduler_kwargs=lr_scheduler_kwargs, + dataloader_kwargs=dataloader_kwargs, **trainer_kwargs, ) diff --git a/neuralforecast/models/bitcn.py b/neuralforecast/models/bitcn.py index a91e1a67f..cf4fc91df 100644 --- a/neuralforecast/models/bitcn.py +++ b/neuralforecast/models/bitcn.py @@ -113,15 +113,13 @@ class BiTCN(BaseWindows): `step_size`: int=1, step size between each window of temporal data.
`scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
`random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
- `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
- 'prefetch_factor': int=None, number of batches to be prefetched by the worker.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
- `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
`alias`: str, optional, Custom name of the model.
`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
`optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
`lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
`lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
+ `dataloader_kwargs`: dict, optional, keyword arguments that the `TimeSeriesDataLoader` passes to the underlying PyTorch `DataLoader` (e.g. `num_workers`, `pin_memory`, `prefetch_factor`).
`**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
**References**
@@ -160,14 +158,12 @@ def __init__( step_size: int = 1, scaler_type: str = "identity", random_seed: int = 1, - num_workers_loader: int = 0, - prefetch_factor: Optional[int] = None, drop_last_loader: bool = False, - pin_memory: bool = False, optimizer=None, optimizer_kwargs=None, lr_scheduler=None, lr_scheduler_kwargs=None, + dataloader_kwargs=None, **trainer_kwargs ): super(BiTCN, self).__init__( @@ -192,14 +188,12 @@ def __init__( step_size=step_size, scaler_type=scaler_type, random_seed=random_seed, - num_workers_loader=num_workers_loader, - prefetch_factor=prefetch_factor, drop_last_loader=drop_last_loader, - pin_memory=pin_memory, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, lr_scheduler=lr_scheduler, lr_scheduler_kwargs=lr_scheduler_kwargs, + dataloader_kwargs=dataloader_kwargs, **trainer_kwargs ) diff --git a/neuralforecast/models/deepar.py b/neuralforecast/models/deepar.py index 9fda0c8e7..6b16f51d1 100644 --- a/neuralforecast/models/deepar.py +++ b/neuralforecast/models/deepar.py @@ -84,15 +84,13 @@ class DeepAR(BaseWindows): `step_size`: int=1, step size between each window of temporal data.
`scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
`random_seed`: int, random_seed for pytorch initializer and numpy generators.
- `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
- 'prefetch_factor': int=None, number of batches to be prefetched by the worker.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
- `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
`alias`: str, optional, Custom name of the model.
`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
`optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
`lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
`lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
+ `dataloader_kwargs`: dict, optional, keyword arguments that the `TimeSeriesDataLoader` passes to the underlying PyTorch `DataLoader` (e.g. `num_workers`, `pin_memory`, `prefetch_factor`).
`**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
**References**
@@ -138,14 +136,12 @@ def __init__( step_size: int = 1, scaler_type: str = "identity", random_seed: int = 1, - num_workers_loader=0, - prefetch_factor: Optional[int] = None, drop_last_loader=False, - pin_memory=False, optimizer=None, optimizer_kwargs=None, lr_scheduler=None, lr_scheduler_kwargs=None, + dataloader_kwargs=None, **trainer_kwargs ): @@ -187,15 +183,13 @@ def __init__( start_padding_enabled=start_padding_enabled, step_size=step_size, scaler_type=scaler_type, - num_workers_loader=num_workers_loader, - prefetch_factor=prefetch_factor, drop_last_loader=drop_last_loader, - pin_memory=pin_memory, random_seed=random_seed, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, lr_scheduler=lr_scheduler, lr_scheduler_kwargs=lr_scheduler_kwargs, + dataloader_kwargs=dataloader_kwargs, **trainer_kwargs ) diff --git a/neuralforecast/models/deepnpts.py b/neuralforecast/models/deepnpts.py index c39b48eb4..6d7194227 100644 --- a/neuralforecast/models/deepnpts.py +++ b/neuralforecast/models/deepnpts.py @@ -46,8 +46,6 @@ class DeepNPTS(BaseWindows): `step_size`: int=1, step size between each window of temporal data.
`scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
`random_seed`: int, random_seed for pytorch initializer and numpy generators.
- `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
- 'prefetch_factor': int=None, number of batches to be prefetched by the worker.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
`pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
`alias`: str, optional, Custom name of the model.
@@ -55,6 +53,7 @@ class DeepNPTS(BaseWindows): `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
`lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
`lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
+ `dataloader_kwargs`: dict, optional, keyword arguments that the `TimeSeriesDataLoader` passes to the underlying PyTorch `DataLoader` (e.g. `num_workers`, `pin_memory`, `prefetch_factor`).
`**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
**References**
@@ -95,14 +94,12 @@ def __init__( step_size: int = 1, scaler_type: str = "standard", random_seed: int = 1, - num_workers_loader=0, - prefetch_factor: Optional[int] = None, drop_last_loader=False, - pin_memory=False, optimizer=None, optimizer_kwargs=None, lr_scheduler=None, lr_scheduler_kwargs=None, + dataloader_kwargs=None, **trainer_kwargs ): @@ -141,15 +138,13 @@ def __init__( start_padding_enabled=start_padding_enabled, step_size=step_size, scaler_type=scaler_type, - num_workers_loader=num_workers_loader, - prefetch_factor=prefetch_factor, drop_last_loader=drop_last_loader, - pin_memory=pin_memory, random_seed=random_seed, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, lr_scheduler=lr_scheduler, lr_scheduler_kwargs=lr_scheduler_kwargs, + dataloader_kwargs=dataloader_kwargs, **trainer_kwargs ) diff --git a/neuralforecast/models/dilated_rnn.py b/neuralforecast/models/dilated_rnn.py index 006d0bb10..96094c961 100644 --- a/neuralforecast/models/dilated_rnn.py +++ b/neuralforecast/models/dilated_rnn.py @@ -314,15 +314,13 @@ class DilatedRNN(BaseRecurrent): `step_size`: int=1, step size between each window of temporal data.
`scaler_type`: str='robust', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
`random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
- `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
- 'prefetch_factor': int=None, number of batches to be prefetched by the worker.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
- `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
`alias`: str, optional, Custom name of the model.
`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
`optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
`lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
`lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
+ `dataloader_kwargs`: dict, optional, keyword arguments that the `TimeSeriesDataLoader` passes to the underlying PyTorch `DataLoader` (e.g. `num_workers`, `pin_memory`, `prefetch_factor`).
`**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
""" @@ -358,14 +356,12 @@ def __init__( step_size: int = 1, scaler_type: str = "robust", random_seed: int = 1, - num_workers_loader: int = 0, - prefetch_factor: Optional[int] = None, drop_last_loader: bool = False, - pin_memory: bool = False, optimizer=None, optimizer_kwargs=None, lr_scheduler=None, lr_scheduler_kwargs=None, + dataloader_kwargs=None, **trainer_kwargs ): super(DilatedRNN, self).__init__( @@ -385,15 +381,13 @@ def __init__( futr_exog_list=futr_exog_list, hist_exog_list=hist_exog_list, stat_exog_list=stat_exog_list, - num_workers_loader=num_workers_loader, - prefetch_factor=prefetch_factor, drop_last_loader=drop_last_loader, - pin_memory=pin_memory, random_seed=random_seed, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, lr_scheduler=lr_scheduler, lr_scheduler_kwargs=lr_scheduler_kwargs, + dataloader_kwargs=dataloader_kwargs, **trainer_kwargs ) diff --git a/neuralforecast/models/dlinear.py b/neuralforecast/models/dlinear.py index 22dcdadc8..3af5f11c0 100644 --- a/neuralforecast/models/dlinear.py +++ b/neuralforecast/models/dlinear.py @@ -72,15 +72,13 @@ class DLinear(BaseWindows): `start_padding_enabled`: bool=False, if True, the model will pad the time series with zeros at the beginning, by input size.
`scaler_type`: str='robust', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
`random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
- `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
- 'prefetch_factor': int=None, number of batches to be prefetched by the worker.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
- `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
`alias`: str, optional, Custom name of the model.
`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
`optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
`lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
`lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
+ `dataloader_kwargs`: dict, optional, keyword arguments that the `TimeSeriesDataLoader` passes to the underlying PyTorch `DataLoader` (e.g. `num_workers`, `pin_memory`, `prefetch_factor`).
`**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
*References*
@@ -117,14 +115,12 @@ def __init__( step_size: int = 1, scaler_type: str = "identity", random_seed: int = 1, - num_workers_loader: int = 0, - prefetch_factor: Optional[int] = None, drop_last_loader: bool = False, - pin_memory: bool = False, optimizer=None, optimizer_kwargs=None, lr_scheduler=None, lr_scheduler_kwargs=None, + dataloader_kwargs=None, **trainer_kwargs ): super(DLinear, self).__init__( @@ -148,15 +144,13 @@ def __init__( start_padding_enabled=start_padding_enabled, step_size=step_size, scaler_type=scaler_type, - num_workers_loader=num_workers_loader, - prefetch_factor=prefetch_factor, drop_last_loader=drop_last_loader, - pin_memory=pin_memory, random_seed=random_seed, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, lr_scheduler=lr_scheduler, lr_scheduler_kwargs=lr_scheduler_kwargs, + dataloader_kwargs=dataloader_kwargs, **trainer_kwargs ) diff --git a/neuralforecast/models/fedformer.py b/neuralforecast/models/fedformer.py index 182901994..7cfe3c5a6 100644 --- a/neuralforecast/models/fedformer.py +++ b/neuralforecast/models/fedformer.py @@ -437,15 +437,13 @@ class FEDformer(BaseWindows): `start_padding_enabled`: bool=False, if True, the model will pad the time series with zeros at the beginning, by input size.
`scaler_type`: str='robust', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
`random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
- `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
- 'prefetch_factor': int=None, number of batches to be prefetched by the worker.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
- `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
`alias`: str, optional, Custom name of the model.
`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
`optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
`lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
`lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
+ `dataloader_kwargs`: dict, optional, keyword arguments that the `TimeSeriesDataLoader` passes to the underlying PyTorch `DataLoader` (e.g. `num_workers`, `pin_memory`, `prefetch_factor`).
`**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
""" @@ -490,14 +488,12 @@ def __init__( step_size: int = 1, scaler_type: str = "identity", random_seed: int = 1, - num_workers_loader: int = 0, - prefetch_factor: Optional[int] = None, drop_last_loader: bool = False, - pin_memory: bool = False, optimizer=None, optimizer_kwargs=None, lr_scheduler=None, lr_scheduler_kwargs=None, + dataloader_kwargs=None, **trainer_kwargs, ): super(FEDformer, self).__init__( @@ -520,15 +516,13 @@ def __init__( start_padding_enabled=start_padding_enabled, step_size=step_size, scaler_type=scaler_type, - num_workers_loader=num_workers_loader, - prefetch_factor=prefetch_factor, drop_last_loader=drop_last_loader, - pin_memory=pin_memory, random_seed=random_seed, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, lr_scheduler=lr_scheduler, lr_scheduler_kwargs=lr_scheduler_kwargs, + dataloader_kwargs=dataloader_kwargs, **trainer_kwargs, ) # Architecture diff --git a/neuralforecast/models/gru.py b/neuralforecast/models/gru.py index ee545e572..5fa61d0b5 100644 --- a/neuralforecast/models/gru.py +++ b/neuralforecast/models/gru.py @@ -48,15 +48,13 @@ class GRU(BaseRecurrent): `valid_batch_size`: int=None, number of different series in each validation and test batch.
`scaler_type`: str='robust', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
`random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
- `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
- 'prefetch_factor': int=None, number of batches to be prefetched by the worker.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
- `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
`alias`: str, optional, Custom name of the model.
`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
`optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
`lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
`lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
+ `dataloader_kwargs`: dict, optional, keyword arguments that the `TimeSeriesDataLoader` passes to the underlying PyTorch `DataLoader` (e.g. `num_workers`, `pin_memory`, `prefetch_factor`).
`**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
""" @@ -93,14 +91,12 @@ def __init__( valid_batch_size: Optional[int] = None, scaler_type: str = "robust", random_seed=1, - num_workers_loader=0, - prefetch_factor: Optional[int] = None, drop_last_loader=False, - pin_memory=False, optimizer=None, optimizer_kwargs=None, lr_scheduler=None, lr_scheduler_kwargs=None, + dataloader_kwargs=None, **trainer_kwargs ): super(GRU, self).__init__( @@ -120,15 +116,13 @@ def __init__( futr_exog_list=futr_exog_list, hist_exog_list=hist_exog_list, stat_exog_list=stat_exog_list, - num_workers_loader=num_workers_loader, - prefetch_factor=prefetch_factor, drop_last_loader=drop_last_loader, - pin_memory=pin_memory, random_seed=random_seed, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, lr_scheduler=lr_scheduler, lr_scheduler_kwargs=lr_scheduler_kwargs, + dataloader_kwargs=dataloader_kwargs, **trainer_kwargs ) diff --git a/neuralforecast/models/informer.py b/neuralforecast/models/informer.py index 2e9d44d2a..cb4ff2622 100644 --- a/neuralforecast/models/informer.py +++ b/neuralforecast/models/informer.py @@ -223,15 +223,13 @@ class Informer(BaseWindows): `start_padding_enabled`: bool=False, if True, the model will pad the time series with zeros at the beginning, by input size.
`scaler_type`: str='robust', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
`random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
- `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
- 'prefetch_factor': int=None, number of batches to be prefetched by the worker.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
- `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
`alias`: str, optional, Custom name of the model.
`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
`optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
`lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
`lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
+ `dataloader_kwargs`: dict, optional, keyword arguments that the `TimeSeriesDataLoader` passes to the underlying PyTorch `DataLoader` (e.g. `num_workers`, `pin_memory`, `prefetch_factor`).
`**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
*References*
@@ -277,14 +275,12 @@ def __init__( step_size: int = 1, scaler_type: str = "identity", random_seed: int = 1, - num_workers_loader: int = 0, - prefetch_factor: Optional[int] = None, drop_last_loader: bool = False, - pin_memory: bool = False, optimizer=None, optimizer_kwargs=None, lr_scheduler=None, lr_scheduler_kwargs=None, + dataloader_kwargs=None, **trainer_kwargs, ): super(Informer, self).__init__( @@ -308,15 +304,13 @@ def __init__( start_padding_enabled=start_padding_enabled, step_size=step_size, scaler_type=scaler_type, - num_workers_loader=num_workers_loader, - prefetch_factor=prefetch_factor, drop_last_loader=drop_last_loader, - pin_memory=pin_memory, random_seed=random_seed, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, lr_scheduler=lr_scheduler, lr_scheduler_kwargs=lr_scheduler_kwargs, + dataloader_kwargs=dataloader_kwargs, **trainer_kwargs, ) diff --git a/neuralforecast/models/itransformer.py b/neuralforecast/models/itransformer.py index a59021904..121eac2b5 100644 --- a/neuralforecast/models/itransformer.py +++ b/neuralforecast/models/itransformer.py @@ -102,9 +102,6 @@ def forward(self, x, x_mark): return self.dropout(x) # %% ../../nbs/models.itransformer.ipynb 13 -from typing import Optional - - class iTransformer(BaseMultivariate): """iTransformer @@ -134,15 +131,13 @@ class iTransformer(BaseMultivariate): `step_size`: int=1, step size between each window of temporal data.
`scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
`random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
- `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
- 'prefetch_factor': int=None, number of batches to be prefetched by the worker.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
- `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
`alias`: str, optional, Custom name of the model.
`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
`optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
`lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
`lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
+ `dataloader_kwargs`: dict, optional, keyword arguments that the `TimeSeriesDataLoader` passes to the underlying PyTorch `DataLoader` (e.g. `num_workers`, `pin_memory`, `prefetch_factor`).
`**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
**References**
@@ -182,14 +177,12 @@ def __init__( step_size: int = 1, scaler_type: str = "identity", random_seed: int = 1, - num_workers_loader: int = 0, - prefetch_factor: Optional[int] = None, drop_last_loader: bool = False, - pin_memory: bool = False, optimizer=None, optimizer_kwargs=None, lr_scheduler=None, lr_scheduler_kwargs=None, + dataloader_kwargs=None, **trainer_kwargs ): @@ -211,14 +204,12 @@ def __init__( step_size=step_size, scaler_type=scaler_type, random_seed=random_seed, - num_workers_loader=num_workers_loader, - prefetch_factor=prefetch_factor, drop_last_loader=drop_last_loader, - pin_memory=pin_memory, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, lr_scheduler=lr_scheduler, lr_scheduler_kwargs=lr_scheduler_kwargs, + dataloader_kwargs=dataloader_kwargs, **trainer_kwargs ) diff --git a/neuralforecast/models/kan.py b/neuralforecast/models/kan.py index 767b299dd..e442fdbd4 100644 --- a/neuralforecast/models/kan.py +++ b/neuralforecast/models/kan.py @@ -281,13 +281,11 @@ class KAN(BaseWindows): `step_size`: int=1, step size between each window of temporal data.
`scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
`random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
- `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
- 'prefetch_factor': int=None, number of batches to be prefetched by the worker.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
- `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
`alias`: str, optional, Custom name of the model.
`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
`optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
+ `dataloader_kwargs`: dict, optional, keyword arguments that the `TimeSeriesDataLoader` passes to the underlying PyTorch `DataLoader` (e.g. `num_workers`, `pin_memory`, `prefetch_factor`).
`**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
**References**
@@ -333,12 +331,10 @@ def __init__( step_size: int = 1, scaler_type: str = "identity", random_seed: int = 1, - num_workers_loader: int = 0, - prefetch_factor: Optional[int] = None, drop_last_loader: bool = False, - pin_memory: bool = False, optimizer=None, optimizer_kwargs=None, + dataloader_kwargs=None, **trainer_kwargs ): @@ -364,13 +360,11 @@ def __init__( start_padding_enabled=start_padding_enabled, step_size=step_size, scaler_type=scaler_type, - num_workers_loader=num_workers_loader, - prefetch_factor=prefetch_factor, drop_last_loader=drop_last_loader, - pin_memory=pin_memory, random_seed=random_seed, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, + dataloader_kwargs=dataloader_kwargs, **trainer_kwargs ) diff --git a/neuralforecast/models/lstm.py b/neuralforecast/models/lstm.py index 7c65bbf95..bb8906b8d 100644 --- a/neuralforecast/models/lstm.py +++ b/neuralforecast/models/lstm.py @@ -47,15 +47,13 @@ class LSTM(BaseRecurrent): `valid_batch_size`: int=None, number of different series in each validation and test batch.
`scaler_type`: str='robust', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
`random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
- `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
- 'prefetch_factor': int=None, number of batches to be prefetched by the worker.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
- `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
`alias`: str, optional, Custom name of the model.
`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
`optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
`lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
`lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
+ `dataloader_kwargs`: dict, optional, keyword arguments that the `TimeSeriesDataLoader` passes to the underlying PyTorch `DataLoader` (e.g. `num_workers`, `pin_memory`, `prefetch_factor`).
`**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
""" @@ -91,14 +89,12 @@ def __init__( valid_batch_size: Optional[int] = None, scaler_type: str = "robust", random_seed=1, - num_workers_loader=0, - prefetch_factor: Optional[int] = None, drop_last_loader=False, - pin_memory=False, optimizer=None, optimizer_kwargs=None, lr_scheduler=None, lr_scheduler_kwargs=None, + dataloader_kwargs=None, **trainer_kwargs ): super(LSTM, self).__init__( @@ -118,15 +114,13 @@ def __init__( futr_exog_list=futr_exog_list, hist_exog_list=hist_exog_list, stat_exog_list=stat_exog_list, - num_workers_loader=num_workers_loader, - prefetch_factor=prefetch_factor, drop_last_loader=drop_last_loader, - pin_memory=pin_memory, random_seed=random_seed, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, lr_scheduler=lr_scheduler, lr_scheduler_kwargs=lr_scheduler_kwargs, + dataloader_kwargs=dataloader_kwargs, **trainer_kwargs ) diff --git a/neuralforecast/models/mlp.py b/neuralforecast/models/mlp.py index 9b20aec30..535c41424 100644 --- a/neuralforecast/models/mlp.py +++ b/neuralforecast/models/mlp.py @@ -46,15 +46,13 @@ class MLP(BaseWindows): `step_size`: int=1, step size between each window of temporal data.
`scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
`random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
- `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
- 'prefetch_factor': int=None, number of batches to be prefetched by the worker.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
- `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
`alias`: str, optional, Custom name of the model.
`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
`optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
`lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
`lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
+ `dataloader_kwargs`: dict, optional, keyword arguments that the `TimeSeriesDataLoader` passes to the underlying PyTorch `DataLoader` (e.g. `num_workers`, `pin_memory`, `prefetch_factor`).
`**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
""" @@ -89,14 +87,12 @@ def __init__( step_size: int = 1, scaler_type: str = "identity", random_seed: int = 1, - num_workers_loader: int = 0, - prefetch_factor: Optional[int] = None, drop_last_loader: bool = False, - pin_memory: bool = False, optimizer=None, optimizer_kwargs=None, lr_scheduler=None, lr_scheduler_kwargs=None, + dataloader_kwargs=None, **trainer_kwargs ): @@ -122,15 +118,13 @@ def __init__( start_padding_enabled=start_padding_enabled, step_size=step_size, scaler_type=scaler_type, - num_workers_loader=num_workers_loader, - prefetch_factor=prefetch_factor, drop_last_loader=drop_last_loader, - pin_memory=pin_memory, random_seed=random_seed, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, lr_scheduler=lr_scheduler, lr_scheduler_kwargs=lr_scheduler_kwargs, + dataloader_kwargs=dataloader_kwargs, **trainer_kwargs ) diff --git a/neuralforecast/models/mlpmultivariate.py b/neuralforecast/models/mlpmultivariate.py index 406c2b884..f03ec7222 100644 --- a/neuralforecast/models/mlpmultivariate.py +++ b/neuralforecast/models/mlpmultivariate.py @@ -11,9 +11,6 @@ from ..common._base_multivariate import BaseMultivariate # %% ../../nbs/models.mlpmultivariate.ipynb 6 -from typing import Optional - - class MLPMultivariate(BaseMultivariate): """MLPMultivariate @@ -43,15 +40,13 @@ class MLPMultivariate(BaseMultivariate): `step_size`: int=1, step size between each window of temporal data.
`scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
`random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
- `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
- 'prefetch_factor': int=None, number of batches to be prefetched by the worker.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
- `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
`alias`: str, optional, Custom name of the model.
`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
`optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
`lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
`lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
+ `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
`**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
""" @@ -82,14 +77,12 @@ def __init__( step_size: int = 1, scaler_type: str = "identity", random_seed: int = 1, - num_workers_loader: int = 0, - prefetch_factor: Optional[int] = None, drop_last_loader: bool = False, - pin_memory: bool = False, optimizer=None, optimizer_kwargs=None, lr_scheduler=None, lr_scheduler_kwargs=None, + dataloader_kwargs=None, **trainer_kwargs ): @@ -111,15 +104,13 @@ def __init__( batch_size=batch_size, step_size=step_size, scaler_type=scaler_type, - num_workers_loader=num_workers_loader, - prefetch_factor=prefetch_factor, drop_last_loader=drop_last_loader, - pin_memory=pin_memory, random_seed=random_seed, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, lr_scheduler=lr_scheduler, lr_scheduler_kwargs=lr_scheduler_kwargs, + dataloader_kwargs=dataloader_kwargs, **trainer_kwargs ) diff --git a/neuralforecast/models/nbeats.py b/neuralforecast/models/nbeats.py index 62e1da90e..1fb4f07b8 100644 --- a/neuralforecast/models/nbeats.py +++ b/neuralforecast/models/nbeats.py @@ -225,15 +225,13 @@ class NBEATS(BaseWindows): `step_size`: int=1, step size between each window of temporal data.
`scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
`random_seed`: int, random_seed for pytorch initializer and numpy generators.
- `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
- 'prefetch_factor': int=None, number of batches to be prefetched by the worker.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
- `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
`alias`: str, optional, Custom name of the model.
`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
`optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
`lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
`lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
+ `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
`**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
**References:**
@@ -274,14 +272,12 @@ def __init__( step_size: int = 1, scaler_type: str = "identity", random_seed: int = 1, - num_workers_loader: int = 0, - prefetch_factor: Optional[int] = None, drop_last_loader: bool = False, - pin_memory: bool = False, optimizer=None, optimizer_kwargs=None, lr_scheduler=None, lr_scheduler_kwargs=None, + dataloader_kwargs=None, **trainer_kwargs, ): @@ -309,15 +305,13 @@ def __init__( start_padding_enabled=start_padding_enabled, step_size=step_size, scaler_type=scaler_type, - num_workers_loader=num_workers_loader, - prefetch_factor=prefetch_factor, drop_last_loader=drop_last_loader, - pin_memory=pin_memory, random_seed=random_seed, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, lr_scheduler=lr_scheduler, lr_scheduler_kwargs=lr_scheduler_kwargs, + dataloader_kwargs=dataloader_kwargs, **trainer_kwargs, ) diff --git a/neuralforecast/models/nbeatsx.py b/neuralforecast/models/nbeatsx.py index 04e6447b1..10e37f608 100644 --- a/neuralforecast/models/nbeatsx.py +++ b/neuralforecast/models/nbeatsx.py @@ -312,15 +312,13 @@ class NBEATSx(BaseWindows): `step_size`: int=1, step size between each window of temporal data.
`scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
`random_seed`: int, random seed initialization for replicability.
- `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
- 'prefetch_factor': int=None, number of batches to be prefetched by the worker.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
- `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
`alias`: str, optional, Custom name of the model.
`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
`optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
`lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
`lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
+ `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
`**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
**References:**
@@ -365,14 +363,12 @@ def __init__( step_size: int = 1, scaler_type: str = "identity", random_seed: int = 1, - num_workers_loader: int = 0, - prefetch_factor: Optional[int] = None, drop_last_loader: bool = False, - pin_memory: bool = False, optimizer=None, optimizer_kwargs=None, lr_scheduler=None, lr_scheduler_kwargs=None, + dataloader_kwargs=None, **trainer_kwargs, ): # Protect horizon collapsed seasonality and trend NBEATSx-i basis @@ -403,15 +399,13 @@ def __init__( start_padding_enabled=start_padding_enabled, step_size=step_size, scaler_type=scaler_type, - num_workers_loader=num_workers_loader, - prefetch_factor=prefetch_factor, drop_last_loader=drop_last_loader, - pin_memory=pin_memory, random_seed=random_seed, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, lr_scheduler=lr_scheduler, lr_scheduler_kwargs=lr_scheduler_kwargs, + dataloader_kwargs=dataloader_kwargs, **trainer_kwargs, ) diff --git a/neuralforecast/models/nhits.py b/neuralforecast/models/nhits.py index 4574d86a5..f16db81a3 100644 --- a/neuralforecast/models/nhits.py +++ b/neuralforecast/models/nhits.py @@ -223,15 +223,13 @@ class NHITS(BaseWindows): `step_size`: int=1, step size between each window of temporal data.
`scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
`random_seed`: int, random_seed for pytorch initializer and numpy generators.
- `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
- 'prefetch_factor': int=None, number of batches to be prefetched by the worker.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
- `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
`alias`: str, optional, Custom name of the model.
`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
`optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
`lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
`lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
+ `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
`**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
**References:**
@@ -278,14 +276,12 @@ def __init__( step_size: int = 1, scaler_type: str = "identity", random_seed: int = 1, - num_workers_loader=0, - prefetch_factor: Optional[int] = None, drop_last_loader=False, - pin_memory=False, optimizer=None, optimizer_kwargs=None, lr_scheduler=None, lr_scheduler_kwargs=None, + dataloader_kwargs=None, **trainer_kwargs, ): @@ -311,15 +307,13 @@ def __init__( start_padding_enabled=start_padding_enabled, step_size=step_size, scaler_type=scaler_type, - num_workers_loader=num_workers_loader, - prefetch_factor=prefetch_factor, drop_last_loader=drop_last_loader, - pin_memory=pin_memory, random_seed=random_seed, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, lr_scheduler=lr_scheduler, lr_scheduler_kwargs=lr_scheduler_kwargs, + dataloader_kwargs=dataloader_kwargs, **trainer_kwargs, ) diff --git a/neuralforecast/models/nlinear.py b/neuralforecast/models/nlinear.py index d453e5373..4bad929b1 100644 --- a/neuralforecast/models/nlinear.py +++ b/neuralforecast/models/nlinear.py @@ -36,15 +36,13 @@ class NLinear(BaseWindows): `start_padding_enabled`: bool=False, if True, the model will pad the time series with zeros at the beginning, by input size.
`scaler_type`: str='robust', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
`random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
- `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
- 'prefetch_factor': int=None, number of batches to be prefetched by the worker.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
- `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
`alias`: str, optional, Custom name of the model.
`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
`optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
`lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
`lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
+ `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
`**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
*References*
@@ -80,14 +78,12 @@ def __init__( step_size: int = 1, scaler_type: str = "identity", random_seed: int = 1, - num_workers_loader: int = 0, - prefetch_factor: Optional[int] = None, drop_last_loader: bool = False, - pin_memory: bool = False, optimizer=None, optimizer_kwargs=None, lr_scheduler=None, lr_scheduler_kwargs=None, + dataloader_kwargs=None, **trainer_kwargs ): super(NLinear, self).__init__( @@ -111,15 +107,13 @@ def __init__( start_padding_enabled=start_padding_enabled, step_size=step_size, scaler_type=scaler_type, - num_workers_loader=num_workers_loader, - prefetch_factor=prefetch_factor, drop_last_loader=drop_last_loader, - pin_memory=pin_memory, random_seed=random_seed, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, lr_scheduler=lr_scheduler, lr_scheduler_kwargs=lr_scheduler_kwargs, + dataloader_kwargs=dataloader_kwargs, **trainer_kwargs ) diff --git a/neuralforecast/models/patchtst.py b/neuralforecast/models/patchtst.py index 5691c4a9c..25770b71c 100644 --- a/neuralforecast/models/patchtst.py +++ b/neuralforecast/models/patchtst.py @@ -833,15 +833,13 @@ class PatchTST(BaseWindows): `step_size`: int=1, step size between each window of temporal data.
`scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
`random_seed`: int, random_seed for pytorch initializer and numpy generators.
- `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
- 'prefetch_factor': int=None, number of batches to be prefetched by the worker.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
- `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
`alias`: str, optional, Custom name of the model.
`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
`optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
`lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
`lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
+ `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
`**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
**References:**
@@ -894,14 +892,12 @@ def __init__( step_size: int = 1, scaler_type: str = "identity", random_seed: int = 1, - num_workers_loader: int = 0, - prefetch_factor: Optional[int] = None, drop_last_loader: bool = False, - pin_memory: bool = False, optimizer=None, optimizer_kwargs=None, lr_scheduler=None, lr_scheduler_kwargs=None, + dataloader_kwargs=None, **trainer_kwargs ): super(PatchTST, self).__init__( @@ -925,15 +921,13 @@ def __init__( start_padding_enabled=start_padding_enabled, step_size=step_size, scaler_type=scaler_type, - num_workers_loader=num_workers_loader, - prefetch_factor=prefetch_factor, drop_last_loader=drop_last_loader, - pin_memory=pin_memory, random_seed=random_seed, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, lr_scheduler=lr_scheduler, lr_scheduler_kwargs=lr_scheduler_kwargs, + dataloader_kwargs=dataloader_kwargs, **trainer_kwargs ) diff --git a/neuralforecast/models/rmok.py b/neuralforecast/models/rmok.py index 14f76de00..fc66483d6 100644 --- a/neuralforecast/models/rmok.py +++ b/neuralforecast/models/rmok.py @@ -256,9 +256,6 @@ def forward(self, x): return y # %% ../../nbs/models.rmok.ipynb 14 -from typing import Optional - - class RMoK(BaseMultivariate): """Reversible Mixture of KAN **Parameters**
@@ -284,15 +281,13 @@ class RMoK(BaseMultivariate): `step_size`: int=1, step size between each window of temporal data.
`scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
`random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
- `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
- 'prefetch_factor': int=None, number of batches to be prefetched by the worker.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
- `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
`alias`: str, optional, Custom name of the model.
`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
`optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
`lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
`lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
+ `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
`**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
Reference
@@ -329,14 +324,12 @@ def __init__( step_size: int = 1, scaler_type: str = "identity", random_seed: int = 1, - num_workers_loader: int = 0, - prefetch_factor: Optional[int] = None, drop_last_loader: bool = False, - pin_memory: bool = False, optimizer=None, optimizer_kwargs=None, lr_scheduler=None, lr_scheduler_kwargs=None, + dataloader_kwargs=None, **trainer_kwargs ): @@ -358,14 +351,12 @@ def __init__( step_size=step_size, scaler_type=scaler_type, random_seed=random_seed, - num_workers_loader=num_workers_loader, - prefetch_factor=prefetch_factor, drop_last_loader=drop_last_loader, - pin_memory=pin_memory, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, lr_scheduler=lr_scheduler, lr_scheduler_kwargs=lr_scheduler_kwargs, + dataloader_kwargs=dataloader_kwargs, **trainer_kwargs ) diff --git a/neuralforecast/models/rnn.py b/neuralforecast/models/rnn.py index f8cf8ef98..d3f8b4fff 100644 --- a/neuralforecast/models/rnn.py +++ b/neuralforecast/models/rnn.py @@ -48,14 +48,12 @@ class RNN(BaseRecurrent): `valid_batch_size`: int=None, number of different series in each validation and test batch.
`scaler_type`: str='robust', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
`random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
- `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
- 'prefetch_factor': int=None, number of batches to be prefetched by the worker.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
- `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
`optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
`lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
`lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
+ `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
`alias`: str, optional, Custom name of the model.
`**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
@@ -94,14 +92,12 @@ def __init__( valid_batch_size: Optional[int] = None, scaler_type: str = "robust", random_seed=1, - num_workers_loader=0, - prefetch_factor: Optional[int] = None, drop_last_loader=False, - pin_memory=False, optimizer=None, optimizer_kwargs=None, lr_scheduler=None, lr_scheduler_kwargs=None, + dataloader_kwargs=None, **trainer_kwargs ): super(RNN, self).__init__( @@ -121,15 +117,13 @@ def __init__( futr_exog_list=futr_exog_list, hist_exog_list=hist_exog_list, stat_exog_list=stat_exog_list, - num_workers_loader=num_workers_loader, - prefetch_factor=prefetch_factor, drop_last_loader=drop_last_loader, - pin_memory=pin_memory, random_seed=random_seed, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, lr_scheduler=lr_scheduler, lr_scheduler_kwargs=lr_scheduler_kwargs, + dataloader_kwargs=dataloader_kwargs, **trainer_kwargs ) diff --git a/neuralforecast/models/softs.py b/neuralforecast/models/softs.py index a42b2a710..a40f32beb 100644 --- a/neuralforecast/models/softs.py +++ b/neuralforecast/models/softs.py @@ -79,9 +79,6 @@ def forward(self, input, *args, **kwargs): return output, None # %% ../../nbs/models.softs.ipynb 10 -from typing import Optional - - class SOFTS(BaseMultivariate): """SOFTS @@ -109,15 +106,13 @@ class SOFTS(BaseMultivariate): `step_size`: int=1, step size between each window of temporal data.
`scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
`random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
- `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
- 'prefetch_factor': int=None, number of batches to be prefetched by the worker.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
- `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
`alias`: str, optional, Custom name of the model.
`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
`optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
`lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
`lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
+ `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
`**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
**References**
@@ -155,14 +150,12 @@ def __init__( step_size: int = 1, scaler_type: str = "identity", random_seed: int = 1, - num_workers_loader: int = 0, - prefetch_factor: Optional[int] = None, drop_last_loader: bool = False, - pin_memory: bool = False, optimizer=None, optimizer_kwargs=None, lr_scheduler=None, lr_scheduler_kwargs=None, + dataloader_kwargs=None, **trainer_kwargs ): @@ -184,14 +177,12 @@ def __init__( step_size=step_size, scaler_type=scaler_type, random_seed=random_seed, - num_workers_loader=num_workers_loader, - prefetch_factor=prefetch_factor, drop_last_loader=drop_last_loader, - pin_memory=pin_memory, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, lr_scheduler=lr_scheduler, lr_scheduler_kwargs=lr_scheduler_kwargs, + dataloader_kwargs=dataloader_kwargs, **trainer_kwargs ) diff --git a/neuralforecast/models/stemgnn.py b/neuralforecast/models/stemgnn.py index 1db823c6c..69cdc4ef5 100644 --- a/neuralforecast/models/stemgnn.py +++ b/neuralforecast/models/stemgnn.py @@ -136,9 +136,6 @@ def forward(self, x, mul_L): return forecast, backcast_source # %% ../../nbs/models.stemgnn.ipynb 9 -from typing import Optional - - class StemGNN(BaseMultivariate): """StemGNN @@ -169,15 +166,13 @@ class StemGNN(BaseMultivariate): `step_size`: int=1, step size between each window of temporal data.
`scaler_type`: str='robust', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
`random_seed`: int, random_seed for pytorch initializer and numpy generators.
- `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
- 'prefetch_factor': int=None, number of batches to be prefetched by the worker.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
- `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
`alias`: str, optional, Custom name of the model.
`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
`optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
`lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
`lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
+ `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
`**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
""" @@ -210,14 +205,12 @@ def __init__( step_size: int = 1, scaler_type: str = "robust", random_seed: int = 1, - num_workers_loader=0, - prefetch_factor=None, drop_last_loader=False, - pin_memory=False, optimizer=None, optimizer_kwargs=None, lr_scheduler=None, lr_scheduler_kwargs=None, + dataloader_kwargs=None, **trainer_kwargs ): @@ -239,15 +232,13 @@ def __init__( batch_size=batch_size, step_size=step_size, scaler_type=scaler_type, - num_workers_loader=num_workers_loader, - prefetch_factor=prefetch_factor, drop_last_loader=drop_last_loader, - pin_memory=pin_memory, random_seed=random_seed, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, lr_scheduler=lr_scheduler, lr_scheduler_kwargs=lr_scheduler_kwargs, + dataloader_kwargs=dataloader_kwargs, **trainer_kwargs ) # Quick fix for now, fix the model later. diff --git a/neuralforecast/models/tcn.py b/neuralforecast/models/tcn.py index 182b68195..70dd9c37f 100644 --- a/neuralforecast/models/tcn.py +++ b/neuralforecast/models/tcn.py @@ -44,15 +44,13 @@ class TCN(BaseRecurrent): `val_check_steps`: int=100, Number of training steps between every validation loss check.
`batch_size`: int=32, number of differentseries in each batch.
`scaler_type`: str='robust', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
`random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
- `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
- 'prefetch_factor': int=None, number of batches to be prefetched by the worker.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
- `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
`alias`: str, optional, Custom name of the model.
`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
`optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
`lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
`lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
+ `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
`**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
""" @@ -88,14 +86,12 @@ def __init__( valid_batch_size: Optional[int] = None, scaler_type: str = "robust", random_seed: int = 1, - num_workers_loader=0, - prefetch_factor: Optional[int] = None, drop_last_loader=False, - pin_memory=False, optimizer=None, optimizer_kwargs=None, lr_scheduler=None, lr_scheduler_kwargs=None, + dataloader_kwargs=None, **trainer_kwargs ): super(TCN, self).__init__( @@ -115,15 +111,13 @@ def __init__( futr_exog_list=futr_exog_list, hist_exog_list=hist_exog_list, stat_exog_list=stat_exog_list, - num_workers_loader=num_workers_loader, - prefetch_factor=prefetch_factor, drop_last_loader=drop_last_loader, - pin_memory=pin_memory, random_seed=random_seed, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, lr_scheduler=lr_scheduler, lr_scheduler_kwargs=lr_scheduler_kwargs, + dataloader_kwargs=dataloader_kwargs, **trainer_kwargs ) diff --git a/neuralforecast/models/tft.py b/neuralforecast/models/tft.py index d4fe3376e..616496fc0 100644 --- a/neuralforecast/models/tft.py +++ b/neuralforecast/models/tft.py @@ -454,15 +454,13 @@ class TFT(BaseWindows): `step_size`: int=1, step size between each window of temporal data.
`scaler_type`: str='robust', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
`random_seed`: int, random seed initialization for replicability.
- `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
- 'prefetch_factor': int=None, number of batches to be prefetched by the worker.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
- `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
`alias`: str, optional, Custom name of the model.
`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
`optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
`lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
`lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
+ `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
`**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
**References:**
@@ -503,15 +501,13 @@ def __init__( start_padding_enabled=False, step_size: int = 1, scaler_type: str = "robust", - num_workers_loader=0, - prefetch_factor: Optional[int] = None, drop_last_loader=False, - pin_memory=False, random_seed: int = 1, optimizer=None, optimizer_kwargs=None, lr_scheduler=None, lr_scheduler_kwargs=None, + dataloader_kwargs=None, **trainer_kwargs, ): @@ -536,15 +532,13 @@ def __init__( start_padding_enabled=start_padding_enabled, step_size=step_size, scaler_type=scaler_type, - num_workers_loader=num_workers_loader, - prefetch_factor=prefetch_factor, drop_last_loader=drop_last_loader, - pin_memory=pin_memory, random_seed=random_seed, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, lr_scheduler=lr_scheduler, lr_scheduler_kwargs=lr_scheduler_kwargs, + dataloader_kwargs=dataloader_kwargs, **trainer_kwargs, ) self.example_length = input_size + h diff --git a/neuralforecast/models/tide.py b/neuralforecast/models/tide.py index 2d20e6fd5..1f8f7144f 100644 --- a/neuralforecast/models/tide.py +++ b/neuralforecast/models/tide.py @@ -78,15 +78,13 @@ class TiDE(BaseWindows): `step_size`: int=1, step size between each window of temporal data.
`scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
`random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
- `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
- 'prefetch_factor': int=None, number of batches to be prefetched by the worker.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
- `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
`alias`: str, optional, Custom name of the model.
`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
`optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
`lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
`lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
+ `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
`**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
**References:**
@@ -131,14 +129,12 @@ def __init__( step_size: int = 1, scaler_type: str = "identity", random_seed: int = 1, - num_workers_loader: int = 0, - prefetch_factor: Optional[int] = None, drop_last_loader: bool = False, - pin_memory: bool = False, optimizer=None, optimizer_kwargs=None, lr_scheduler=None, lr_scheduler_kwargs=None, + dataloader_kwargs=None, **trainer_kwargs ): @@ -165,14 +161,12 @@ def __init__( step_size=step_size, scaler_type=scaler_type, random_seed=random_seed, - num_workers_loader=num_workers_loader, - prefetch_factor=prefetch_factor, drop_last_loader=drop_last_loader, - pin_memory=pin_memory, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, lr_scheduler=lr_scheduler, lr_scheduler_kwargs=lr_scheduler_kwargs, + dataloader_kwargs=dataloader_kwargs, **trainer_kwargs ) self.h = h diff --git a/neuralforecast/models/timellm.py b/neuralforecast/models/timellm.py index e515cb807..bec5fb453 100644 --- a/neuralforecast/models/timellm.py +++ b/neuralforecast/models/timellm.py @@ -211,15 +211,13 @@ class TimeLLM(BaseWindows): `step_size`: int=1, step size between each window of temporal data.
`scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
`random_seed`: int, random_seed for pytorch initializer and numpy generators.
- `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
- 'prefetch_factor': int=None, number of batches to be prefetched by the worker.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
- `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
`alias`: str, optional, Custom name of the model.
`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
`optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
`lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
`lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
+ `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
`**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
**References:**
@@ -270,15 +268,13 @@ def __init__( num_lr_decays: int = 0, early_stop_patience_steps: int = -1, scaler_type: str = "identity", - num_workers_loader: int = 0, - prefetch_factor: Optional[int] = None, drop_last_loader: bool = False, - pin_memory: bool = False, random_seed: int = 1, optimizer=None, optimizer_kwargs=None, lr_scheduler=None, lr_scheduler_kwargs=None, + dataloader_kwargs=None, **trainer_kwargs, ): super(TimeLLM, self).__init__( @@ -301,15 +297,13 @@ def __init__( start_padding_enabled=start_padding_enabled, step_size=step_size, scaler_type=scaler_type, - num_workers_loader=num_workers_loader, - prefetch_factor=prefetch_factor, drop_last_loader=drop_last_loader, - pin_memory=pin_memory, random_seed=random_seed, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, lr_scheduler=lr_scheduler, lr_scheduler_kwargs=lr_scheduler_kwargs, + dataloader_kwargs=dataloader_kwargs, **trainer_kwargs, ) diff --git a/neuralforecast/models/timemixer.py b/neuralforecast/models/timemixer.py index 9f632a1a8..cdaea20bc 100644 --- a/neuralforecast/models/timemixer.py +++ b/neuralforecast/models/timemixer.py @@ -249,9 +249,6 @@ def forward(self, x_list): return out_list # %% ../../nbs/models.timemixer.ipynb 12 -from typing import Optional - - class TimeMixer(BaseMultivariate): """TimeMixer **Parameters**
@@ -285,15 +282,13 @@ class TimeMixer(BaseMultivariate): `step_size`: int=1, step size between each window of temporal data.
`scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
`random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
- `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
- 'prefetch_factor': int=None, number of batches to be prefetched by the worker.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
- `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
`alias`: str, optional, Custom name of the model.
`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
`optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
`lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
`lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
+ `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
`**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
**References**
@@ -338,14 +333,12 @@ def __init__( step_size: int = 1, scaler_type: str = "identity", random_seed: int = 1, - num_workers_loader: int = 0, - prefetch_factor: Optional[int] = None, drop_last_loader: bool = False, - pin_memory: bool = False, optimizer=None, optimizer_kwargs=None, lr_scheduler=None, lr_scheduler_kwargs=None, + dataloader_kwargs=None, **trainer_kwargs, ): @@ -367,14 +360,12 @@ def __init__( step_size=step_size, scaler_type=scaler_type, random_seed=random_seed, - num_workers_loader=num_workers_loader, - prefetch_factor=prefetch_factor, drop_last_loader=drop_last_loader, - pin_memory=pin_memory, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, lr_scheduler=lr_scheduler, lr_scheduler_kwargs=lr_scheduler_kwargs, + dataloader_kwargs=dataloader_kwargs, **trainer_kwargs, ) diff --git a/neuralforecast/models/timesnet.py b/neuralforecast/models/timesnet.py index 8b0ff9257..9089a8796 100644 --- a/neuralforecast/models/timesnet.py +++ b/neuralforecast/models/timesnet.py @@ -178,20 +178,16 @@ class TimesNet(BaseWindows): Type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
random_seed : int (default=1) Random_seed for pytorch initializer and numpy generators. - num_workers_loader : int (default=0) - Workers to be used by `TimeSeriesDataLoader`. - 'prefetch_factor': int (default=None) - Number of batches to be prefetched by the worker. drop_last_loader : bool (default=False) If True `TimeSeriesDataLoader` drops last non-full batch. - `pin_memory`: bool (default=False) - If True `TimeSeriesDataLoader` uses pinned memory. `optimizer`: Subclass of 'torch.optim.Optimizer', optional (default=None) User specified optimizer instead of the default choice (Adam). `optimizer_kwargs`: dict, optional (defualt=None) List of parameters used by the user specified `optimizer`. `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
`lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
+ `dataloader_kwargs`: dict, optional (default=None) + List of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
**trainer_kwargs Keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer) @@ -235,14 +231,12 @@ def __init__( step_size: int = 1, scaler_type: str = "standard", random_seed: int = 1, - num_workers_loader: int = 0, - prefetch_factor: Optional[int] = None, drop_last_loader: bool = False, - pin_memory: bool = False, optimizer=None, optimizer_kwargs=None, lr_scheduler=None, lr_scheduler_kwargs=None, + dataloader_kwargs=None, **trainer_kwargs ): super(TimesNet, self).__init__( @@ -266,15 +260,13 @@ def __init__( start_padding_enabled=start_padding_enabled, step_size=step_size, scaler_type=scaler_type, - num_workers_loader=num_workers_loader, - prefetch_factor=prefetch_factor, drop_last_loader=drop_last_loader, - pin_memory=pin_memory, random_seed=random_seed, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, lr_scheduler=lr_scheduler, lr_scheduler_kwargs=lr_scheduler_kwargs, + dataloader_kwargs=dataloader_kwargs, **trainer_kwargs ) diff --git a/neuralforecast/models/tsmixer.py b/neuralforecast/models/tsmixer.py index 605e2d0ff..17fae38be 100644 --- a/neuralforecast/models/tsmixer.py +++ b/neuralforecast/models/tsmixer.py @@ -130,9 +130,6 @@ def reverse(self, x): return x # %% ../../nbs/models.tsmixer.ipynb 12 -from typing import Optional - - class TSMixer(BaseMultivariate): """TSMixer @@ -160,15 +157,13 @@ class TSMixer(BaseMultivariate): `step_size`: int=1, step size between each window of temporal data.
`scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
`random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
- `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
- 'prefetch_factor': int=None, number of batches to be prefetched by the worker.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
- `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
`alias`: str, optional, Custom name of the model.
`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
`optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
`lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
`lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
+ `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
`**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
**References:**
@@ -205,14 +200,12 @@ def __init__( step_size: int = 1, scaler_type: str = "identity", random_seed: int = 1, - num_workers_loader: int = 0, - prefetch_factor: Optional[int] = None, drop_last_loader: bool = False, - pin_memory: bool = False, optimizer=None, optimizer_kwargs=None, lr_scheduler=None, lr_scheduler_kwargs=None, + dataloader_kwargs=None, **trainer_kwargs ): @@ -235,14 +228,12 @@ def __init__( step_size=step_size, scaler_type=scaler_type, random_seed=random_seed, - num_workers_loader=num_workers_loader, - prefetch_factor=prefetch_factor, drop_last_loader=drop_last_loader, - pin_memory=pin_memory, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, lr_scheduler=lr_scheduler, lr_scheduler_kwargs=lr_scheduler_kwargs, + dataloader_kwargs=dataloader_kwargs, **trainer_kwargs ) diff --git a/neuralforecast/models/tsmixerx.py b/neuralforecast/models/tsmixerx.py index 7997ed281..97747bbb4 100644 --- a/neuralforecast/models/tsmixerx.py +++ b/neuralforecast/models/tsmixerx.py @@ -158,9 +158,6 @@ def reverse(self, x): return x # %% ../../nbs/models.tsmixerx.ipynb 12 -from typing import Optional - - class TSMixerx(BaseMultivariate): """TSMixerx @@ -188,15 +185,13 @@ class TSMixerx(BaseMultivariate): `step_size`: int=1, step size between each window of temporal data.
`scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
`random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
- `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
- 'prefetch_factor': int=None, number of batches to be prefetched by the worker.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
- `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
`alias`: str, optional, Custom name of the model.
`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
`optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
`lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
`lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
+ `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
`**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
**References:**
@@ -233,14 +228,12 @@ def __init__( step_size: int = 1, scaler_type: str = "identity", random_seed: int = 1, - num_workers_loader: int = 0, - prefetch_factor: Optional[int] = None, drop_last_loader: bool = False, - pin_memory: bool = False, optimizer=None, optimizer_kwargs=None, lr_scheduler=None, lr_scheduler_kwargs=None, + dataloader_kwargs=None, **trainer_kwargs ): @@ -263,14 +256,12 @@ def __init__( step_size=step_size, scaler_type=scaler_type, random_seed=random_seed, - num_workers_loader=num_workers_loader, - prefetch_factor=prefetch_factor, drop_last_loader=drop_last_loader, - pin_memory=pin_memory, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, lr_scheduler=lr_scheduler, lr_scheduler_kwargs=lr_scheduler_kwargs, + dataloader_kwargs=dataloader_kwargs, **trainer_kwargs ) # Reversible InstanceNormalization layer diff --git a/neuralforecast/models/vanillatransformer.py b/neuralforecast/models/vanillatransformer.py index 640cd67ce..e38c03fc9 100644 --- a/neuralforecast/models/vanillatransformer.py +++ b/neuralforecast/models/vanillatransformer.py @@ -114,15 +114,13 @@ class VanillaTransformer(BaseWindows): `start_padding_enabled`: bool=False, if True, the model will pad the time series with zeros at the beginning, by input size.
`scaler_type`: str='robust', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
`random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
- `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
- 'prefetch_factor': int=None, number of batches to be prefetched by the worker.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
- `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
`alias`: str, optional, Custom name of the model.
`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
`optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
`lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).
`lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.
+ `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.
`**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).
*References*
@@ -165,14 +163,12 @@ def __init__( step_size: int = 1, scaler_type: str = "identity", random_seed: int = 1, - num_workers_loader: int = 0, - prefetch_factor: Optional[int] = None, drop_last_loader: bool = False, - pin_memory: bool = False, optimizer=None, optimizer_kwargs=None, lr_scheduler=None, lr_scheduler_kwargs=None, + dataloader_kwargs=None, **trainer_kwargs, ): super(VanillaTransformer, self).__init__( @@ -195,15 +191,13 @@ def __init__( start_padding_enabled=start_padding_enabled, step_size=step_size, scaler_type=scaler_type, - num_workers_loader=num_workers_loader, - prefetch_factor=prefetch_factor, drop_last_loader=drop_last_loader, - pin_memory=pin_memory, random_seed=random_seed, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, lr_scheduler=lr_scheduler, lr_scheduler_kwargs=lr_scheduler_kwargs, + dataloader_kwargs=dataloader_kwargs, **trainer_kwargs, ) diff --git a/neuralforecast/tsdataset.py b/neuralforecast/tsdataset.py index 4ef58772b..ccf66af9f 100644 --- a/neuralforecast/tsdataset.py +++ b/neuralforecast/tsdataset.py @@ -1,5 +1,3 @@ -"""Torch Dataset for Time Series""" - # AUTOGENERATED! DO NOT EDIT! File to edit: ../nbs/tsdataset.ipynb. 
# %% auto 0 @@ -588,31 +586,25 @@ def __init__( dataset: BaseTimeSeriesDataset, batch_size=32, valid_batch_size=1024, - num_workers=0, - prefetch_factor=None, drop_last=False, shuffle_train=True, - pin_memory=False, + **dataloaders_kwargs ): super().__init__() self.dataset = dataset self.batch_size = batch_size self.valid_batch_size = valid_batch_size - self.num_workers = num_workers - self.prefetch_factor = prefetch_factor self.drop_last = drop_last self.shuffle_train = shuffle_train - self.pin_memory = pin_memory + self.dataloaders_kwargs = dataloaders_kwargs def train_dataloader(self): loader = TimeSeriesLoader( self.dataset, batch_size=self.batch_size, - num_workers=self.num_workers, shuffle=self.shuffle_train, drop_last=self.drop_last, - pin_memory=self.pin_memory, - prefetch_factor=self.prefetch_factor, + **self.dataloaders_kwargs ) return loader @@ -620,11 +612,9 @@ def val_dataloader(self): loader = TimeSeriesLoader( self.dataset, batch_size=self.valid_batch_size, - num_workers=self.num_workers, shuffle=False, drop_last=self.drop_last, - pin_memory=self.pin_memory, - prefetch_factor=self.prefetch_factor, + **self.dataloaders_kwargs ) return loader @@ -632,10 +622,8 @@ def predict_dataloader(self): loader = TimeSeriesLoader( self.dataset, batch_size=self.valid_batch_size, - num_workers=self.num_workers, shuffle=False, - pin_memory=self.pin_memory, - prefetch_factor=self.prefetch_factor, + **self.dataloaders_kwargs ) return loader @@ -646,17 +634,17 @@ def __init__( dataset: _FilesDataset, batch_size=32, valid_batch_size=1024, - num_workers=0, drop_last=False, shuffle_train=True, + **dataloaders_kwargs ): super(TimeSeriesDataModule, self).__init__() self.files_ds = dataset self.batch_size = batch_size self.valid_batch_size = valid_batch_size - self.num_workers = num_workers self.drop_last = drop_last self.shuffle_train = shuffle_train + self.dataloaders_kwargs = dataloaders_kwargs def setup(self, stage): import torch.distributed as dist From 
9ea94b7ee0bc652eacdca098ebfe107b82fe844f Mon Sep 17 00:00:00 2001 From: jasminerienecker Date: Tue, 22 Oct 2024 11:06:55 +1100 Subject: [PATCH 04/11] neatening code --- nbs/common.base_multivariate.ipynb | 2 +- nbs/common.base_recurrent.ipynb | 6 +++--- nbs/models.deepnpts.ipynb | 1 - nbs/models.timemixer.ipynb | 1 + neuralforecast/common/_base_recurrent.py | 6 ++++-- neuralforecast/models/deepnpts.py | 1 - 6 files changed, 9 insertions(+), 8 deletions(-) diff --git a/nbs/common.base_multivariate.ipynb b/nbs/common.base_multivariate.ipynb index 647923866..962d0c6df 100644 --- a/nbs/common.base_multivariate.ipynb +++ b/nbs/common.base_multivariate.ipynb @@ -172,7 +172,7 @@ " self.decompose_forecast = False\n", "\n", " # DataModule arguments\n", - " self.dataloader_kwargs=dataloader_kwargs\n", + " self.dataloader_kwargs = dataloader_kwargs\n", " self.drop_last_loader = drop_last_loader\n", " # used by on_validation_epoch_end hook\n", " self.validation_step_outputs = []\n", diff --git a/nbs/common.base_recurrent.ipynb b/nbs/common.base_recurrent.ipynb index edb5c4cdb..572b2e577 100644 --- a/nbs/common.base_recurrent.ipynb +++ b/nbs/common.base_recurrent.ipynb @@ -171,7 +171,7 @@ " self.test_size = 0\n", "\n", " # DataModule arguments\n", - " self.dataloader_kwargs=dataloader_kwargs\n", + " self.dataloader_kwargs = dataloader_kwargs\n", " self.drop_last_loader = drop_last_loader\n", " # used by on_validation_epoch_end hook\n", " self.validation_step_outputs = []\n", @@ -535,7 +535,7 @@ " \"\"\"\n", " self._check_exog(dataset)\n", " self._restart_seed(random_seed)\n", - " data_module_kwargs = self._set_quantile_for_iqloss(**data_module_kwargs)\n", + " data_module_kwargs = self._set_quantile_for_iqloss(**data_module_kwargs) | self.dataloader_kwargs\n", "\n", " if step_size > 1:\n", " raise Exception('Recurrent models do not support step_size > 1')\n", @@ -551,7 +551,7 @@ " datamodule = TimeSeriesDataModule(\n", " dataset=dataset,\n", " 
valid_batch_size=self.valid_batch_size,\n", - " **self.dataloader_kwargs\n", + " **data_module_kwargs\n", " )\n", " fcsts = trainer.predict(self, datamodule=datamodule)\n", " if self.test_size > 0:\n", diff --git a/nbs/models.deepnpts.ipynb b/nbs/models.deepnpts.ipynb index 0b0fe0620..8f63edbb7 100644 --- a/nbs/models.deepnpts.ipynb +++ b/nbs/models.deepnpts.ipynb @@ -119,7 +119,6 @@ " `scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", " `random_seed`: int, random_seed for pytorch initializer and numpy generators.
\n", " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", - " `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
\n", " `alias`: str, optional, Custom name of the model.
\n", " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n", diff --git a/nbs/models.timemixer.ipynb b/nbs/models.timemixer.ipynb index 485d971eb..10544dbe2 100644 --- a/nbs/models.timemixer.ipynb +++ b/nbs/models.timemixer.ipynb @@ -323,6 +323,7 @@ "outputs": [], "source": [ "#| export\n", + "\n", "class TimeMixer(BaseMultivariate):\n", " \"\"\" TimeMixer\n", " **Parameters**
\n", diff --git a/neuralforecast/common/_base_recurrent.py b/neuralforecast/common/_base_recurrent.py index 48be3fa2a..c8a2e6014 100644 --- a/neuralforecast/common/_base_recurrent.py +++ b/neuralforecast/common/_base_recurrent.py @@ -555,7 +555,9 @@ def predict(self, dataset, step_size=1, random_seed=None, **data_module_kwargs): """ self._check_exog(dataset) self._restart_seed(random_seed) - data_module_kwargs = self._set_quantile_for_iqloss(**data_module_kwargs) + data_module_kwargs = ( + self._set_quantile_for_iqloss(**data_module_kwargs) | self.dataloader_kwargs + ) if step_size > 1: raise Exception("Recurrent models do not support step_size > 1") @@ -573,7 +575,7 @@ def predict(self, dataset, step_size=1, random_seed=None, **data_module_kwargs): datamodule = TimeSeriesDataModule( dataset=dataset, valid_batch_size=self.valid_batch_size, - **self.dataloader_kwargs, + **data_module_kwargs, ) fcsts = trainer.predict(self, datamodule=datamodule) if self.test_size > 0: diff --git a/neuralforecast/models/deepnpts.py b/neuralforecast/models/deepnpts.py index 6d7194227..3edeb0596 100644 --- a/neuralforecast/models/deepnpts.py +++ b/neuralforecast/models/deepnpts.py @@ -47,7 +47,6 @@ class DeepNPTS(BaseWindows): `scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
`random_seed`: int, random_seed for pytorch initializer and numpy generators.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
- `pin_memory`: bool=False, if True `TimeSeriesDataLoader` uses pinned memory.
`alias`: str, optional, Custom name of the model.
`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
`optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
From 87bee95a2bd7d81c1788089c3b9d367b537ef2f8 Mon Sep 17 00:00:00 2001 From: jasminerienecker Date: Tue, 22 Oct 2024 11:15:09 +1100 Subject: [PATCH 05/11] fix tests --- nbs/common.base_model.ipynb | 4 +++- neuralforecast/common/_base_model.py | 6 +++++- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/nbs/common.base_model.ipynb b/nbs/common.base_model.ipynb index 0f1daeeb5..8334c759c 100644 --- a/nbs/common.base_model.ipynb +++ b/nbs/common.base_model.ipynb @@ -358,13 +358,15 @@ " datamodule_constructor = TimeSeriesDataModule\n", " else:\n", " datamodule_constructor = _DistributedTimeSeriesDataModule\n", + " \n", + " dataloader_kwargs = self.dataloader_kwargs if self.dataloader_kwargs is not None else {}\n", " datamodule = datamodule_constructor(\n", " dataset=dataset, \n", " batch_size=batch_size,\n", " valid_batch_size=valid_batch_size,\n", " drop_last=self.drop_last_loader,\n", " shuffle_train=shuffle_train,\n", - " **self.dataloader_kwargs\n", + " **dataloader_kwargs\n", " )\n", "\n", " if self.val_check_steps > self.max_steps:\n", diff --git a/neuralforecast/common/_base_model.py b/neuralforecast/common/_base_model.py index b17a90efa..fdad6184f 100644 --- a/neuralforecast/common/_base_model.py +++ b/neuralforecast/common/_base_model.py @@ -332,13 +332,17 @@ def _fit( datamodule_constructor = TimeSeriesDataModule else: datamodule_constructor = _DistributedTimeSeriesDataModule + + dataloader_kwargs = ( + self.dataloader_kwargs if self.dataloader_kwargs is not None else {} + ) datamodule = datamodule_constructor( dataset=dataset, batch_size=batch_size, valid_batch_size=valid_batch_size, drop_last=self.drop_last_loader, shuffle_train=shuffle_train, - **self.dataloader_kwargs, + **dataloader_kwargs, ) if self.val_check_steps > self.max_steps: From 484365c031e215fa43034ba369ec141a970675fe Mon Sep 17 00:00:00 2001 From: jasminerienecker Date: Tue, 22 Oct 2024 11:32:01 +1100 Subject: [PATCH 06/11] another test fix --- nbs/common.base_recurrent.ipynb | 3 
++- neuralforecast/common/_base_recurrent.py | 5 ++++- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/nbs/common.base_recurrent.ipynb b/nbs/common.base_recurrent.ipynb index 572b2e577..b713e9866 100644 --- a/nbs/common.base_recurrent.ipynb +++ b/nbs/common.base_recurrent.ipynb @@ -535,7 +535,8 @@ " \"\"\"\n", " self._check_exog(dataset)\n", " self._restart_seed(random_seed)\n", - " data_module_kwargs = self._set_quantile_for_iqloss(**data_module_kwargs) | self.dataloader_kwargs\n", + " data_module_kwargs = self._set_quantile_for_iqloss(**data_module_kwargs)\n", + " data_module_kwargs = self.dataloader_kwargs.update(data_module_kwargs) if self.dataloader_kwargs is not None else data_module_kwargs\n", "\n", " if step_size > 1:\n", " raise Exception('Recurrent models do not support step_size > 1')\n", diff --git a/neuralforecast/common/_base_recurrent.py b/neuralforecast/common/_base_recurrent.py index c8a2e6014..8331a63a0 100644 --- a/neuralforecast/common/_base_recurrent.py +++ b/neuralforecast/common/_base_recurrent.py @@ -555,8 +555,11 @@ def predict(self, dataset, step_size=1, random_seed=None, **data_module_kwargs): """ self._check_exog(dataset) self._restart_seed(random_seed) + data_module_kwargs = self._set_quantile_for_iqloss(**data_module_kwargs) data_module_kwargs = ( - self._set_quantile_for_iqloss(**data_module_kwargs) | self.dataloader_kwargs + self.dataloader_kwargs.update(data_module_kwargs) + if self.dataloader_kwargs is not None + else data_module_kwargs ) if step_size > 1: From 3aaef27f5850204d437f012d50fb8024bd4341fe Mon Sep 17 00:00:00 2001 From: jasminerienecker Date: Thu, 24 Oct 2024 15:07:34 +1100 Subject: [PATCH 07/11] add back in num_workers --- nbs/common.base_model.ipynb | 10 ++++++++++ nbs/common.base_multivariate.ipynb | 2 ++ nbs/common.base_recurrent.ipynb | 14 ++++++++++++++ nbs/common.base_windows.ipynb | 2 ++ nbs/models.autoformer.ipynb | 3 +++ nbs/models.bitcn.ipynb | 3 +++ nbs/models.deepar.ipynb | 3 +++ 
nbs/models.deepnpts.ipynb | 3 +++ nbs/models.dilated_rnn.ipynb | 3 +++ nbs/models.dlinear.ipynb | 3 +++ nbs/models.fedformer.ipynb | 3 +++ nbs/models.gru.ipynb | 3 +++ nbs/models.informer.ipynb | 3 +++ nbs/models.itransformer.ipynb | 3 +++ nbs/models.kan.ipynb | 3 +++ nbs/models.lstm.ipynb | 3 +++ nbs/models.mlp.ipynb | 3 +++ nbs/models.mlpmultivariate.ipynb | 3 +++ nbs/models.nbeats.ipynb | 3 +++ nbs/models.nbeatsx.ipynb | 3 +++ nbs/models.nhits.ipynb | 3 +++ nbs/models.nlinear.ipynb | 3 +++ nbs/models.patchtst.ipynb | 3 +++ nbs/models.rmok.ipynb | 3 +++ nbs/models.rnn.ipynb | 3 +++ nbs/models.softs.ipynb | 3 +++ nbs/models.stemgnn.ipynb | 3 +++ nbs/models.tcn.ipynb | 3 +++ nbs/models.tft.ipynb | 3 +++ nbs/models.tide.ipynb | 3 +++ nbs/models.timellm.ipynb | 3 +++ nbs/models.timemixer.ipynb | 3 +++ nbs/models.timesnet.ipynb | 6 +++++- nbs/models.tsmixer.ipynb | 3 +++ nbs/models.tsmixerx.ipynb | 3 +++ nbs/models.vanillatransformer.ipynb | 3 +++ neuralforecast/common/_base_model.py | 10 ++++++++++ neuralforecast/common/_base_multivariate.py | 2 ++ neuralforecast/common/_base_recurrent.py | 14 ++++++++++++++ neuralforecast/common/_base_windows.py | 2 ++ neuralforecast/models/autoformer.py | 3 +++ neuralforecast/models/bitcn.py | 3 +++ neuralforecast/models/deepar.py | 3 +++ neuralforecast/models/deepnpts.py | 3 +++ neuralforecast/models/dilated_rnn.py | 3 +++ neuralforecast/models/dlinear.py | 3 +++ neuralforecast/models/fedformer.py | 3 +++ neuralforecast/models/gru.py | 3 +++ neuralforecast/models/informer.py | 3 +++ neuralforecast/models/itransformer.py | 3 +++ neuralforecast/models/kan.py | 3 +++ neuralforecast/models/lstm.py | 3 +++ neuralforecast/models/mlp.py | 3 +++ neuralforecast/models/mlpmultivariate.py | 3 +++ neuralforecast/models/nbeats.py | 3 +++ neuralforecast/models/nbeatsx.py | 3 +++ neuralforecast/models/nhits.py | 3 +++ neuralforecast/models/nlinear.py | 3 +++ neuralforecast/models/patchtst.py | 3 +++ neuralforecast/models/rmok.py | 3 +++ 
neuralforecast/models/rnn.py | 3 +++ neuralforecast/models/softs.py | 3 +++ neuralforecast/models/stemgnn.py | 3 +++ neuralforecast/models/tcn.py | 3 +++ neuralforecast/models/tft.py | 3 +++ neuralforecast/models/tide.py | 3 +++ neuralforecast/models/timellm.py | 3 +++ neuralforecast/models/timemixer.py | 3 +++ neuralforecast/models/timesnet.py | 6 +++++- neuralforecast/models/tsmixer.py | 3 +++ neuralforecast/models/tsmixerx.py | 3 +++ neuralforecast/models/vanillatransformer.py | 3 +++ 72 files changed, 252 insertions(+), 2 deletions(-) diff --git a/nbs/common.base_model.ipynb b/nbs/common.base_model.ipynb index 8334c759c..fabd78bbf 100644 --- a/nbs/common.base_model.ipynb +++ b/nbs/common.base_model.ipynb @@ -360,6 +360,16 @@ " datamodule_constructor = _DistributedTimeSeriesDataModule\n", " \n", " dataloader_kwargs = self.dataloader_kwargs if self.dataloader_kwargs is not None else {}\n", + " \n", + " if self.num_workers_loader != 0: # value is not at its default\n", + " warnings.warn(\n", + " \"The `num_workers_loader` argument is deprecated and will be removed in a future version. \"\n", + " \"Please provide num_workers through `dataloader_kwargs`, e.g. 
\"\n", + " f\"`dataloader_kwargs={{'num_workers': {self.num_workers_loader}}}`\",\n", + " category=FutureWarning,\n", + " )\n", + " dataloader_kwargs['num_workers'] = self.num_workers_loader\n", + "\n", " datamodule = datamodule_constructor(\n", " dataset=dataset, \n", " batch_size=batch_size,\n", diff --git a/nbs/common.base_multivariate.ipynb b/nbs/common.base_multivariate.ipynb index 962d0c6df..f1321600d 100644 --- a/nbs/common.base_multivariate.ipynb +++ b/nbs/common.base_multivariate.ipynb @@ -101,6 +101,7 @@ " futr_exog_list=None,\n", " hist_exog_list=None,\n", " stat_exog_list=None,\n", + " num_workers_loader=0,\n", " drop_last_loader=False,\n", " random_seed=1, \n", " alias=None,\n", @@ -172,6 +173,7 @@ " self.decompose_forecast = False\n", "\n", " # DataModule arguments\n", + " self.num_workers_loader = num_workers_loader\n", " self.dataloader_kwargs = dataloader_kwargs\n", " self.drop_last_loader = drop_last_loader\n", " # used by on_validation_epoch_end hook\n", diff --git a/nbs/common.base_recurrent.ipynb b/nbs/common.base_recurrent.ipynb index b713e9866..d4091185e 100644 --- a/nbs/common.base_recurrent.ipynb +++ b/nbs/common.base_recurrent.ipynb @@ -79,6 +79,9 @@ "outputs": [], "source": [ "#| export\n", + "import warnings\n", + "\n", + "\n", "class BaseRecurrent(BaseModel):\n", " \"\"\" Base Recurrent\n", " \n", @@ -107,6 +110,7 @@ " futr_exog_list=None,\n", " hist_exog_list=None,\n", " stat_exog_list=None,\n", + " num_workers_loader=0,\n", " drop_last_loader=False,\n", " random_seed=1, \n", " alias=None,\n", @@ -171,6 +175,7 @@ " self.test_size = 0\n", "\n", " # DataModule arguments\n", + " self.num_workers_loader = num_workers_loader\n", " self.dataloader_kwargs = dataloader_kwargs\n", " self.drop_last_loader = drop_last_loader\n", " # used by on_validation_epoch_end hook\n", @@ -538,6 +543,15 @@ " data_module_kwargs = self._set_quantile_for_iqloss(**data_module_kwargs)\n", " data_module_kwargs = self.dataloader_kwargs.update(data_module_kwargs) if 
self.dataloader_kwargs is not None else data_module_kwargs\n", "\n", + " if self.num_workers_loader != 0: # value is not at its default\n", + " warnings.warn(\n", + " \"The `num_workers_loader` argument is deprecated and will be removed in a future version. \"\n", + " \"Please provide num_workers through `dataloader_kwargs`, e.g. \"\n", + " f\"`dataloader_kwargs={{'num_workers': {self.num_workers_loader}}}`\",\n", + " category=FutureWarning,\n", + " ) \n", + " data_module_kwargs['num_workers'] = self.num_workers_loader\n", + " \n", " if step_size > 1:\n", " raise Exception('Recurrent models do not support step_size > 1')\n", "\n", diff --git a/nbs/common.base_windows.ipynb b/nbs/common.base_windows.ipynb index 72a9cfeb4..80f12e5f5 100644 --- a/nbs/common.base_windows.ipynb +++ b/nbs/common.base_windows.ipynb @@ -111,6 +111,7 @@ " hist_exog_list=None,\n", " stat_exog_list=None,\n", " exclude_insample_y=False,\n", + " num_workers_loader=0,\n", " drop_last_loader=False,\n", " random_seed=1,\n", " alias=None,\n", @@ -187,6 +188,7 @@ " self.decompose_forecast = False\n", "\n", " # DataModule arguments\n", + " self.num_workers_loader = num_workers_loader\n", " self.dataloader_kwargs = dataloader_kwargs\n", " self.drop_last_loader = drop_last_loader\n", " # used by on_validation_epoch_end hook\n", diff --git a/nbs/models.autoformer.ipynb b/nbs/models.autoformer.ipynb index 0badf8506..e16506fcd 100644 --- a/nbs/models.autoformer.ipynb +++ b/nbs/models.autoformer.ipynb @@ -455,6 +455,7 @@ " `start_padding_enabled`: bool=False, if True, the model will pad the time series with zeros at the beginning, by input size.
\n", " `scaler_type`: str='robust', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", " `random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
\n", + " `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", " `alias`: str, optional, Custom name of the model.
\n", " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", @@ -505,6 +506,7 @@ " step_size: int = 1,\n", " scaler_type: str = 'identity',\n", " random_seed: int = 1,\n", + " num_workers_loader: int = 0,\n", " drop_last_loader: bool = False,\n", " optimizer = None,\n", " optimizer_kwargs = None,\n", @@ -534,6 +536,7 @@ " scaler_type=scaler_type,\n", " drop_last_loader=drop_last_loader,\n", " random_seed=random_seed,\n", + " num_workers_loader=num_workers_loader,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", " lr_scheduler=lr_scheduler,\n", diff --git a/nbs/models.bitcn.ipynb b/nbs/models.bitcn.ipynb index 14f8ee602..cd78bb194 100644 --- a/nbs/models.bitcn.ipynb +++ b/nbs/models.bitcn.ipynb @@ -175,6 +175,7 @@ " `step_size`: int=1, step size between each window of temporal data.
\n", " `scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", " `random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
\n", + " `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", " `alias`: str, optional, Custom name of the model.
\n", " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", @@ -218,6 +219,7 @@ " step_size: int = 1,\n", " scaler_type: str = 'identity',\n", " random_seed: int = 1,\n", + " num_workers_loader: int = 0,\n", " drop_last_loader: bool = False,\n", " optimizer = None,\n", " optimizer_kwargs = None,\n", @@ -247,6 +249,7 @@ " step_size=step_size,\n", " scaler_type=scaler_type,\n", " random_seed=random_seed,\n", + " num_workers_loader=num_workers_loader,\n", " drop_last_loader=drop_last_loader,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", diff --git a/nbs/models.deepar.ipynb b/nbs/models.deepar.ipynb index 29fcb12bb..777563823 100644 --- a/nbs/models.deepar.ipynb +++ b/nbs/models.deepar.ipynb @@ -180,6 +180,7 @@ " `step_size`: int=1, step size between each window of temporal data.
\n", " `scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", " `random_seed`: int, random_seed for pytorch initializer and numpy generators.
\n", + " `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", " `alias`: str, optional, Custom name of the model.
\n", " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", @@ -228,6 +229,7 @@ " step_size: int = 1,\n", " scaler_type: str = 'identity',\n", " random_seed: int = 1,\n", + " num_workers_loader: int = 0,\n", " drop_last_loader = False,\n", " optimizer = None,\n", " optimizer_kwargs = None,\n", @@ -271,6 +273,7 @@ " scaler_type=scaler_type,\n", " drop_last_loader=drop_last_loader,\n", " random_seed=random_seed,\n", + " num_workers_loader=num_workers_loader,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", " lr_scheduler=lr_scheduler,\n", diff --git a/nbs/models.deepnpts.ipynb b/nbs/models.deepnpts.ipynb index 8f63edbb7..7b44f7ef5 100644 --- a/nbs/models.deepnpts.ipynb +++ b/nbs/models.deepnpts.ipynb @@ -118,6 +118,7 @@ " `step_size`: int=1, step size between each window of temporal data.
\n", " `scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", " `random_seed`: int, random_seed for pytorch initializer and numpy generators.
\n", + " `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", " `alias`: str, optional, Custom name of the model.
\n", " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", @@ -163,6 +164,7 @@ " step_size: int = 1,\n", " scaler_type: str = 'standard',\n", " random_seed: int = 1,\n", + " num_workers_loader: int = 0,\n", " drop_last_loader = False,\n", " optimizer = None,\n", " optimizer_kwargs = None,\n", @@ -203,6 +205,7 @@ " scaler_type=scaler_type,\n", " drop_last_loader=drop_last_loader,\n", " random_seed=random_seed,\n", + " num_workers_loader=num_workers_loader,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", " lr_scheduler=lr_scheduler,\n", diff --git a/nbs/models.dilated_rnn.ipynb b/nbs/models.dilated_rnn.ipynb index 306292527..25ac95d3a 100644 --- a/nbs/models.dilated_rnn.ipynb +++ b/nbs/models.dilated_rnn.ipynb @@ -387,6 +387,7 @@ " `step_size`: int=1, step size between each window of temporal data.
\n", " `scaler_type`: str='robust', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", " `random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
\n", + " `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", " `alias`: str, optional, Custom name of the model.
\n", " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", @@ -427,6 +428,7 @@ " step_size: int = 1,\n", " scaler_type: str = 'robust',\n", " random_seed: int = 1,\n", + " num_workers_loader: int = 0,\n", " drop_last_loader: bool = False,\n", " optimizer = None,\n", " optimizer_kwargs = None,\n", @@ -453,6 +455,7 @@ " stat_exog_list=stat_exog_list,\n", " drop_last_loader=drop_last_loader,\n", " random_seed=random_seed,\n", + " num_workers_loader=num_workers_loader,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", " lr_scheduler=lr_scheduler,\n", diff --git a/nbs/models.dlinear.ipynb b/nbs/models.dlinear.ipynb index 994ea0e1f..f94d0cc7e 100644 --- a/nbs/models.dlinear.ipynb +++ b/nbs/models.dlinear.ipynb @@ -159,6 +159,7 @@ " `start_padding_enabled`: bool=False, if True, the model will pad the time series with zeros at the beginning, by input size.
\n", " `scaler_type`: str='robust', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", " `random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
\n", + " `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", " `alias`: str, optional, Custom name of the model.
\n", " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", @@ -200,6 +201,7 @@ " step_size: int = 1,\n", " scaler_type: str = 'identity',\n", " random_seed: int = 1,\n", + " num_workers_loader: int = 0,\n", " drop_last_loader: bool = False,\n", " optimizer = None,\n", " optimizer_kwargs = None,\n", @@ -229,6 +231,7 @@ " scaler_type=scaler_type,\n", " drop_last_loader=drop_last_loader,\n", " random_seed=random_seed,\n", + " num_workers_loader=num_workers_loader,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", " lr_scheduler=lr_scheduler,\n", diff --git a/nbs/models.fedformer.ipynb b/nbs/models.fedformer.ipynb index 7dcb16553..a6cb7b4dc 100644 --- a/nbs/models.fedformer.ipynb +++ b/nbs/models.fedformer.ipynb @@ -448,6 +448,7 @@ " `start_padding_enabled`: bool=False, if True, the model will pad the time series with zeros at the beginning, by input size.
\n", " `scaler_type`: str='robust', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", " `random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
\n", + " `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", " `alias`: str, optional, Custom name of the model.
\n", " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", @@ -497,6 +498,7 @@ " step_size: int = 1,\n", " scaler_type: str = 'identity',\n", " random_seed: int = 1,\n", + " num_workers_loader: int = 0,\n", " drop_last_loader: bool = False,\n", " optimizer=None,\n", " optimizer_kwargs=None,\n", @@ -525,6 +527,7 @@ " scaler_type=scaler_type,\n", " drop_last_loader=drop_last_loader,\n", " random_seed=random_seed,\n", + " num_workers_loader=num_workers_loader,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", " lr_scheduler=lr_scheduler,\n", diff --git a/nbs/models.gru.ipynb b/nbs/models.gru.ipynb index 93e2032c2..099ae9b55 100644 --- a/nbs/models.gru.ipynb +++ b/nbs/models.gru.ipynb @@ -121,6 +121,7 @@ " `valid_batch_size`: int=None, number of different series in each validation and test batch.
\n", " `scaler_type`: str='robust', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", " `random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
\n", + " `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", " `alias`: str, optional, Custom name of the model.
\n", " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", @@ -162,6 +163,7 @@ " valid_batch_size: Optional[int] = None,\n", " scaler_type: str='robust',\n", " random_seed=1,\n", + " num_workers_loader=0,\n", " drop_last_loader = False,\n", " optimizer = None,\n", " optimizer_kwargs = None,\n", @@ -188,6 +190,7 @@ " stat_exog_list=stat_exog_list,\n", " drop_last_loader=drop_last_loader,\n", " random_seed=random_seed,\n", + " num_workers_loader=num_workers_loader,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", " lr_scheduler=lr_scheduler,\n", diff --git a/nbs/models.informer.ipynb b/nbs/models.informer.ipynb index dffc74e8b..e3ec5ce18 100644 --- a/nbs/models.informer.ipynb +++ b/nbs/models.informer.ipynb @@ -303,6 +303,7 @@ " `start_padding_enabled`: bool=False, if True, the model will pad the time series with zeros at the beginning, by input size.
\n", " `scaler_type`: str='robust', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", " `random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
\n", + " `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", " `alias`: str, optional, Custom name of the model.
\n", " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", @@ -353,6 +354,7 @@ " step_size: int = 1,\n", " scaler_type: str = 'identity',\n", " random_seed: int = 1,\n", + " num_workers_loader: int = 0,\n", " drop_last_loader: bool = False,\n", " optimizer = None,\n", " optimizer_kwargs = None,\n", @@ -382,6 +384,7 @@ " scaler_type=scaler_type,\n", " drop_last_loader=drop_last_loader,\n", " random_seed=random_seed,\n", + " num_workers_loader=num_workers_loader,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", " lr_scheduler=lr_scheduler,\n", diff --git a/nbs/models.itransformer.ipynb b/nbs/models.itransformer.ipynb index e3ad45c0a..694d19f8e 100644 --- a/nbs/models.itransformer.ipynb +++ b/nbs/models.itransformer.ipynb @@ -224,6 +224,7 @@ " `step_size`: int=1, step size between each window of temporal data.
\n", " `scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", " `random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
\n", + " `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", " `alias`: str, optional, Custom name of the model.
\n", " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", @@ -269,6 +270,7 @@ " step_size: int = 1,\n", " scaler_type: str = 'identity',\n", " random_seed: int = 1,\n", + " num_workers_loader: int = 0,\n", " drop_last_loader: bool = False,\n", " optimizer = None,\n", " optimizer_kwargs = None,\n", @@ -294,6 +296,7 @@ " step_size=step_size,\n", " scaler_type=scaler_type,\n", " random_seed=random_seed,\n", + " num_workers_loader=num_workers_loader,\n", " drop_last_loader=drop_last_loader,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", diff --git a/nbs/models.kan.ipynb b/nbs/models.kan.ipynb index 3577b61d5..9ccb8460e 100644 --- a/nbs/models.kan.ipynb +++ b/nbs/models.kan.ipynb @@ -359,6 +359,7 @@ " `step_size`: int=1, step size between each window of temporal data.
\n", " `scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", " `random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
\n", + " `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", " `alias`: str, optional, Custom name of the model.
\n", " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", @@ -408,6 +409,7 @@ " step_size: int = 1,\n", " scaler_type: str = 'identity',\n", " random_seed: int = 1,\n", + " num_workers_loader: int = 0,\n", " drop_last_loader: bool = False,\n", " optimizer = None,\n", " optimizer_kwargs = None,\n", @@ -437,6 +439,7 @@ " scaler_type=scaler_type,\n", " drop_last_loader=drop_last_loader,\n", " random_seed=random_seed,\n", + " num_workers_loader=num_workers_loader,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", " dataloader_kwargs = dataloader_kwargs,\n", diff --git a/nbs/models.lstm.ipynb b/nbs/models.lstm.ipynb index 389bc6ac5..0813edb31 100644 --- a/nbs/models.lstm.ipynb +++ b/nbs/models.lstm.ipynb @@ -118,6 +118,7 @@ " `valid_batch_size`: int=None, number of different series in each validation and test batch.
\n", " `scaler_type`: str='robust', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", " `random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
\n", + " `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", " `alias`: str, optional, Custom name of the model.
\n", " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", @@ -158,6 +159,7 @@ " valid_batch_size: Optional[int] = None,\n", " scaler_type: str = 'robust',\n", " random_seed = 1,\n", + " num_workers_loader = 0,\n", " drop_last_loader = False,\n", " optimizer = None,\n", " optimizer_kwargs = None,\n", @@ -184,6 +186,7 @@ " stat_exog_list=stat_exog_list,\n", " drop_last_loader=drop_last_loader,\n", " random_seed=random_seed,\n", + " num_workers_loader=num_workers_loader,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", " lr_scheduler=lr_scheduler,\n", diff --git a/nbs/models.mlp.ipynb b/nbs/models.mlp.ipynb index b644cf959..17ac94977 100644 --- a/nbs/models.mlp.ipynb +++ b/nbs/models.mlp.ipynb @@ -111,6 +111,7 @@ " `step_size`: int=1, step size between each window of temporal data.
\n", " `scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", " `random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
\n", + " `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", " `alias`: str, optional, Custom name of the model.
\n", " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", @@ -150,6 +151,7 @@ " step_size: int = 1,\n", " scaler_type: str = 'identity',\n", " random_seed: int = 1,\n", + " num_workers_loader: int = 0,\n", " drop_last_loader: bool = False,\n", " optimizer = None,\n", " optimizer_kwargs = None,\n", @@ -181,6 +183,7 @@ " scaler_type=scaler_type,\n", " drop_last_loader=drop_last_loader,\n", " random_seed=random_seed,\n", + " num_workers_loader=num_workers_loader,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", " lr_scheduler=lr_scheduler,\n", diff --git a/nbs/models.mlpmultivariate.ipynb b/nbs/models.mlpmultivariate.ipynb index dfcab59d8..4704119c3 100644 --- a/nbs/models.mlpmultivariate.ipynb +++ b/nbs/models.mlpmultivariate.ipynb @@ -105,6 +105,7 @@ " `step_size`: int=1, step size between each window of temporal data.
\n", " `scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", " `random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
\n", + " `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", " `alias`: str, optional, Custom name of the model.
\n", " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", @@ -140,6 +141,7 @@ " step_size: int = 1,\n", " scaler_type: str = 'identity',\n", " random_seed: int = 1,\n", + " num_workers_loader: int = 0,\n", " drop_last_loader: bool = False,\n", " optimizer = None,\n", " optimizer_kwargs = None,\n", @@ -167,6 +169,7 @@ " scaler_type=scaler_type,\n", " drop_last_loader=drop_last_loader,\n", " random_seed=random_seed,\n", + " num_workers_loader=num_workers_loader,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", " lr_scheduler=lr_scheduler,\n", diff --git a/nbs/models.nbeats.ipynb b/nbs/models.nbeats.ipynb index a855c4f49..8a523b6ac 100644 --- a/nbs/models.nbeats.ipynb +++ b/nbs/models.nbeats.ipynb @@ -267,6 +267,7 @@ " `step_size`: int=1, step size between each window of temporal data.
\n", " `scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", " `random_seed`: int, random_seed for pytorch initializer and numpy generators.
\n", + " `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", " `alias`: str, optional, Custom name of the model.
\n", " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", @@ -312,6 +313,7 @@ " step_size: int = 1,\n", " scaler_type: str ='identity',\n", " random_seed: int = 1,\n", + " num_workers_loader: int = 0,\n", " drop_last_loader: bool = False,\n", " optimizer = None,\n", " optimizer_kwargs = None,\n", @@ -345,6 +347,7 @@ " scaler_type=scaler_type,\n", " drop_last_loader=drop_last_loader,\n", " random_seed=random_seed,\n", + " num_workers_loader=num_workers_loader,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", " lr_scheduler=lr_scheduler,\n", diff --git a/nbs/models.nbeatsx.ipynb b/nbs/models.nbeatsx.ipynb index 04f56ea5b..91c85d156 100644 --- a/nbs/models.nbeatsx.ipynb +++ b/nbs/models.nbeatsx.ipynb @@ -411,6 +411,7 @@ " `step_size`: int=1, step size between each window of temporal data.
\n", " `scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", " `random_seed`: int, random seed initialization for replicability.
\n", + " `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", " `alias`: str, optional, Custom name of the model.
\n", " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", @@ -462,6 +463,7 @@ " step_size: int = 1,\n", " scaler_type: str = \"identity\",\n", " random_seed: int = 1,\n", + " num_workers_loader: int = 0,\n", " drop_last_loader: bool = False,\n", " optimizer = None,\n", " optimizer_kwargs = None,\n", @@ -499,6 +501,7 @@ " scaler_type=scaler_type,\n", " drop_last_loader=drop_last_loader,\n", " random_seed=random_seed,\n", + " num_workers_loader=num_workers_loader,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", " lr_scheduler=lr_scheduler,\n", diff --git a/nbs/models.nhits.ipynb b/nbs/models.nhits.ipynb index 43351c6a4..0ca4babf7 100644 --- a/nbs/models.nhits.ipynb +++ b/nbs/models.nhits.ipynb @@ -300,6 +300,7 @@ " `step_size`: int=1, step size between each window of temporal data.
\n", " `scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", " `random_seed`: int, random_seed for pytorch initializer and numpy generators.
\n", + " `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", " `alias`: str, optional, Custom name of the model.
\n", " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", @@ -351,6 +352,7 @@ " step_size: int = 1,\n", " scaler_type: str = 'identity',\n", " random_seed: int = 1,\n", + " num_workers_loader: int = 0,\n", " drop_last_loader = False,\n", " optimizer = None,\n", " optimizer_kwargs = None,\n", @@ -382,6 +384,7 @@ " scaler_type=scaler_type,\n", " drop_last_loader=drop_last_loader,\n", " random_seed=random_seed,\n", + " num_workers_loader=num_workers_loader,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", " lr_scheduler=lr_scheduler,\n", diff --git a/nbs/models.nlinear.ipynb b/nbs/models.nlinear.ipynb index 974256002..6695a3f1a 100644 --- a/nbs/models.nlinear.ipynb +++ b/nbs/models.nlinear.ipynb @@ -99,6 +99,7 @@ " `start_padding_enabled`: bool=False, if True, the model will pad the time series with zeros at the beginning, by input size.
\n", " `scaler_type`: str='robust', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", " `random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
\n", + " `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", " `alias`: str, optional, Custom name of the model.
\n", " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", @@ -139,6 +140,7 @@ " step_size: int = 1,\n", " scaler_type: str = 'identity',\n", " random_seed: int = 1,\n", + " num_workers_loader: int = 0,\n", " drop_last_loader: bool = False,\n", " optimizer = None,\n", " optimizer_kwargs = None,\n", @@ -168,6 +170,7 @@ " scaler_type=scaler_type,\n", " drop_last_loader=drop_last_loader,\n", " random_seed=random_seed,\n", + " num_workers_loader=num_workers_loader,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", " lr_scheduler=lr_scheduler,\n", diff --git a/nbs/models.patchtst.ipynb b/nbs/models.patchtst.ipynb index b5ecd50c1..2fec96889 100644 --- a/nbs/models.patchtst.ipynb +++ b/nbs/models.patchtst.ipynb @@ -659,6 +659,7 @@ " `step_size`: int=1, step size between each window of temporal data.
\n", " `scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", " `random_seed`: int, random_seed for pytorch initializer and numpy generators.
\n", + " `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", " `alias`: str, optional, Custom name of the model.
\n", " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", @@ -716,6 +717,7 @@ " step_size: int = 1,\n", " scaler_type: str = 'identity',\n", " random_seed: int = 1,\n", + " num_workers_loader: int = 0,\n", " drop_last_loader: bool = False,\n", " optimizer = None,\n", " optimizer_kwargs = None,\n", @@ -745,6 +747,7 @@ " scaler_type=scaler_type,\n", " drop_last_loader=drop_last_loader,\n", " random_seed=random_seed,\n", + " num_workers_loader=num_workers_loader,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", " lr_scheduler=lr_scheduler,\n", diff --git a/nbs/models.rmok.ipynb b/nbs/models.rmok.ipynb index 170e8c730..54a9d9d1d 100644 --- a/nbs/models.rmok.ipynb +++ b/nbs/models.rmok.ipynb @@ -355,6 +355,7 @@ " `step_size`: int=1, step size between each window of temporal data.
\n", " `scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", " `random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
\n", + " `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", " `alias`: str, optional, Custom name of the model.
\n", " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", @@ -397,6 +398,7 @@ " step_size: int = 1,\n", " scaler_type: str = 'identity',\n", " random_seed: int = 1,\n", + " num_workers_loader: int = 0,\n", " drop_last_loader: bool = False,\n", " optimizer = None,\n", " optimizer_kwargs = None,\n", @@ -422,6 +424,7 @@ " step_size=step_size,\n", " scaler_type=scaler_type,\n", " random_seed=random_seed,\n", + " num_workers_loader=num_workers_loader,\n", " drop_last_loader=drop_last_loader,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", diff --git a/nbs/models.rnn.ipynb b/nbs/models.rnn.ipynb index 6fda8deae..b05173f43 100644 --- a/nbs/models.rnn.ipynb +++ b/nbs/models.rnn.ipynb @@ -123,6 +123,7 @@ " `valid_batch_size`: int=None, number of different series in each validation and test batch.
\n", " `scaler_type`: str='robust', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", " `random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
\n", + " `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", " `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
\n", @@ -165,6 +166,7 @@ " valid_batch_size: Optional[int] = None,\n", " scaler_type: str='robust',\n", " random_seed=1,\n", + " num_workers_loader=0,\n", " drop_last_loader=False,\n", " optimizer=None,\n", " optimizer_kwargs=None,\n", @@ -191,6 +193,7 @@ " stat_exog_list=stat_exog_list,\n", " drop_last_loader=drop_last_loader,\n", " random_seed=random_seed,\n", + " num_workers_loader=num_workers_loader,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", " lr_scheduler=lr_scheduler,\n", diff --git a/nbs/models.softs.ipynb b/nbs/models.softs.ipynb index 8738b216f..c6eacd06c 100644 --- a/nbs/models.softs.ipynb +++ b/nbs/models.softs.ipynb @@ -196,6 +196,7 @@ " `step_size`: int=1, step size between each window of temporal data.
\n", " `scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", " `random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
\n", + " `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", " `alias`: str, optional, Custom name of the model.
\n", " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", @@ -239,6 +240,7 @@ " step_size: int = 1,\n", " scaler_type: str = 'identity',\n", " random_seed: int = 1,\n", + " num_workers_loader: int = 0,\n", " drop_last_loader: bool = False,\n", " optimizer = None,\n", " optimizer_kwargs = None,\n", @@ -264,6 +266,7 @@ " step_size=step_size,\n", " scaler_type=scaler_type,\n", " random_seed=random_seed,\n", + " num_workers_loader=num_workers_loader,\n", " drop_last_loader=drop_last_loader,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", diff --git a/nbs/models.stemgnn.ipynb b/nbs/models.stemgnn.ipynb index 357a6985f..d7b118d52 100644 --- a/nbs/models.stemgnn.ipynb +++ b/nbs/models.stemgnn.ipynb @@ -201,6 +201,7 @@ " `step_size`: int=1, step size between each window of temporal data.
\n", " `scaler_type`: str='robust', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", " `random_seed`: int, random_seed for pytorch initializer and numpy generators.
\n", + " `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", " `alias`: str, optional, Custom name of the model.
\n", " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", @@ -238,6 +239,7 @@ " step_size: int = 1,\n", " scaler_type: str = 'robust',\n", " random_seed: int = 1,\n", + " num_workers_loader: int = 0,\n", " drop_last_loader = False,\n", " optimizer = None,\n", " optimizer_kwargs = None,\n", @@ -265,6 +267,7 @@ " scaler_type=scaler_type,\n", " drop_last_loader=drop_last_loader,\n", " random_seed=random_seed,\n", + " num_workers_loader=num_workers_loader,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", " lr_scheduler=lr_scheduler,\n", diff --git a/nbs/models.tcn.ipynb b/nbs/models.tcn.ipynb index ffa490066..828d76b91 100644 --- a/nbs/models.tcn.ipynb +++ b/nbs/models.tcn.ipynb @@ -123,6 +123,7 @@ " `val_check_steps`: int=100, Number of training steps between every validation loss check.
`batch_size`: int=32, number of differentseries in each batch.
\n", " `scaler_type`: str='robust', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", " `random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
\n", + " `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", " `alias`: str, optional, Custom name of the model.
\n", " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", @@ -163,6 +164,7 @@ " valid_batch_size: Optional[int] = None,\n", " scaler_type: str ='robust',\n", " random_seed: int = 1,\n", + " num_workers_loader: int = 0,\n", " drop_last_loader = False,\n", " optimizer = None,\n", " optimizer_kwargs = None,\n", @@ -189,6 +191,7 @@ " stat_exog_list=stat_exog_list,\n", " drop_last_loader=drop_last_loader,\n", " random_seed=random_seed,\n", + " num_workers_loader=num_workers_loader,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", " lr_scheduler=lr_scheduler,\n", diff --git a/nbs/models.tft.ipynb b/nbs/models.tft.ipynb index 6e313bb1a..1468ef156 100644 --- a/nbs/models.tft.ipynb +++ b/nbs/models.tft.ipynb @@ -693,6 +693,7 @@ " `step_size`: int=1, step size between each window of temporal data.
\n", " `scaler_type`: str='robust', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", " `random_seed`: int, random seed initialization for replicability.
\n", + " `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", " `alias`: str, optional, Custom name of the model.
\n", " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", @@ -742,6 +743,7 @@ " scaler_type: str = \"robust\",\n", " drop_last_loader=False,\n", " random_seed: int = 1,\n", + " num_workers_loader: int = 0,\n", " optimizer=None,\n", " optimizer_kwargs=None,\n", " lr_scheduler=None,\n", @@ -773,6 +775,7 @@ " scaler_type=scaler_type,\n", " drop_last_loader=drop_last_loader,\n", " random_seed=random_seed,\n", + " num_workers_loader=num_workers_loader,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", " lr_scheduler=lr_scheduler,\n", diff --git a/nbs/models.tide.ipynb b/nbs/models.tide.ipynb index 3b096a26f..6a16d2b2b 100644 --- a/nbs/models.tide.ipynb +++ b/nbs/models.tide.ipynb @@ -164,6 +164,7 @@ " `step_size`: int=1, step size between each window of temporal data.
\n", " `scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", " `random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
\n", + " `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", " `alias`: str, optional, Custom name of the model.
\n", " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", @@ -213,6 +214,7 @@ " step_size: int = 1,\n", " scaler_type: str = 'identity',\n", " random_seed: int = 1,\n", + " num_workers_loader: int = 0,\n", " drop_last_loader: bool = False,\n", " optimizer = None,\n", " optimizer_kwargs = None,\n", @@ -244,6 +246,7 @@ " step_size=step_size,\n", " scaler_type=scaler_type,\n", " random_seed=random_seed,\n", + " num_workers_loader=num_workers_loader,\n", " drop_last_loader=drop_last_loader,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", diff --git a/nbs/models.timellm.ipynb b/nbs/models.timellm.ipynb index 812515882..05fc571c1 100755 --- a/nbs/models.timellm.ipynb +++ b/nbs/models.timellm.ipynb @@ -288,6 +288,7 @@ " `step_size`: int=1, step size between each window of temporal data.
\n", " `scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", " `random_seed`: int, random_seed for pytorch initializer and numpy generators.
\n", + " `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", " `alias`: str, optional, Custom name of the model.
\n", " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", @@ -346,6 +347,7 @@ " scaler_type: str = 'identity',\n", " drop_last_loader: bool = False,\n", " random_seed: int = 1,\n", + " num_workers_loader: int = 0,\n", " optimizer = None,\n", " optimizer_kwargs = None,\n", " lr_scheduler = None,\n", @@ -373,6 +375,7 @@ " scaler_type=scaler_type,\n", " drop_last_loader=drop_last_loader,\n", " random_seed=random_seed,\n", + " num_workers_loader=num_workers_loader,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", " lr_scheduler=lr_scheduler,\n", diff --git a/nbs/models.timemixer.ipynb b/nbs/models.timemixer.ipynb index 10544dbe2..9bfdd9cc5 100644 --- a/nbs/models.timemixer.ipynb +++ b/nbs/models.timemixer.ipynb @@ -357,6 +357,7 @@ " `step_size`: int=1, step size between each window of temporal data.
\n", " `scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", " `random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
\n", + " `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", " `alias`: str, optional, Custom name of the model.
\n", " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", @@ -407,6 +408,7 @@ " step_size: int = 1,\n", " scaler_type: str = 'identity',\n", " random_seed: int = 1,\n", + " num_workers_loader: int = 0,\n", " drop_last_loader: bool = False,\n", " optimizer = None,\n", " optimizer_kwargs = None,\n", @@ -432,6 +434,7 @@ " step_size=step_size,\n", " scaler_type=scaler_type,\n", " random_seed=random_seed,\n", + " num_workers_loader=num_workers_loader,\n", " drop_last_loader=drop_last_loader,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", diff --git a/nbs/models.timesnet.ipynb b/nbs/models.timesnet.ipynb index bc85e7126..751afb1e7 100644 --- a/nbs/models.timesnet.ipynb +++ b/nbs/models.timesnet.ipynb @@ -259,9 +259,11 @@ " Type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", " random_seed : int (default=1)\n", " Random_seed for pytorch initializer and numpy generators.\n", + " num_workers_loader : int (default=0) \n", + " Workers to be used by `TimeSeriesDataLoader`.\n", " drop_last_loader : bool (default=False)\n", " If True `TimeSeriesDataLoader` drops last non-full batch.\n", - " `optimizer`: Subclass of 'torch.optim.Optimizer', optional (default=None)\n", + " `optimizer`: Subclass of 'torch.optim.Optimizer', optional (default=None)\n", " User specified optimizer instead of the default choice (Adam).\n", " `optimizer_kwargs`: dict, optional (defualt=None)\n", " List of parameters used by the user specified `optimizer`.\n", @@ -310,6 +312,7 @@ " step_size: int = 1,\n", " scaler_type: str = 'standard',\n", " random_seed: int = 1,\n", + " num_workers_loader: int = 0,\n", " drop_last_loader: bool = False,\n", " optimizer = None,\n", " optimizer_kwargs = None,\n", @@ -339,6 +342,7 @@ " scaler_type=scaler_type,\n", " drop_last_loader=drop_last_loader,\n", " random_seed=random_seed,\n", + " num_workers_loader=num_workers_loader,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", " lr_scheduler=lr_scheduler,\n", diff --git a/nbs/models.tsmixer.ipynb b/nbs/models.tsmixer.ipynb index 55080cad9..94a9e4125 100644 --- a/nbs/models.tsmixer.ipynb +++ b/nbs/models.tsmixer.ipynb @@ -247,6 +247,7 @@ " `step_size`: int=1, step size between each window of temporal data.<br>
\n", " `scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", " `random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
\n", + " `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", " `alias`: str, optional, Custom name of the model.
\n", " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", @@ -288,6 +289,7 @@ " step_size: int = 1,\n", " scaler_type: str = 'identity',\n", " random_seed: int = 1,\n", + " num_workers_loader: int = 0,\n", " drop_last_loader: bool = False,\n", " optimizer = None,\n", " optimizer_kwargs = None,\n", @@ -314,6 +316,7 @@ " step_size=step_size,\n", " scaler_type=scaler_type,\n", " random_seed=random_seed,\n", + " num_workers_loader=num_workers_loader,\n", " drop_last_loader=drop_last_loader,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", diff --git a/nbs/models.tsmixerx.ipynb b/nbs/models.tsmixerx.ipynb index 74ba735eb..cb0ba72b6 100644 --- a/nbs/models.tsmixerx.ipynb +++ b/nbs/models.tsmixerx.ipynb @@ -271,6 +271,7 @@ " `step_size`: int=1, step size between each window of temporal data.
\n", " `scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", " `random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
\n", + " `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", " `alias`: str, optional, Custom name of the model.
\n", " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", @@ -312,6 +313,7 @@ " step_size: int = 1,\n", " scaler_type: str = 'identity',\n", " random_seed: int = 1,\n", + " num_workers_loader: int = 0,\n", " drop_last_loader: bool = False,\n", " optimizer = None,\n", " optimizer_kwargs = None,\n", @@ -338,6 +340,7 @@ " step_size=step_size,\n", " scaler_type=scaler_type,\n", " random_seed=random_seed,\n", + " num_workers_loader=num_workers_loader,\n", " drop_last_loader=drop_last_loader,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", diff --git a/nbs/models.vanillatransformer.ipynb b/nbs/models.vanillatransformer.ipynb index 232de7dfa..eb0fb7d2a 100644 --- a/nbs/models.vanillatransformer.ipynb +++ b/nbs/models.vanillatransformer.ipynb @@ -195,6 +195,7 @@ " `start_padding_enabled`: bool=False, if True, the model will pad the time series with zeros at the beginning, by input size.
\n", " `scaler_type`: str='robust', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", " `random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
\n", + " `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
\n", " `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
\n", " `alias`: str, optional, Custom name of the model.
\n", " `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
\n", @@ -242,6 +243,7 @@ " step_size: int = 1,\n", " scaler_type: str = 'identity',\n", " random_seed: int = 1,\n", + " num_workers_loader: int = 0,\n", " drop_last_loader: bool = False,\n", " optimizer = None,\n", " optimizer_kwargs = None,\n", @@ -270,6 +272,7 @@ " scaler_type=scaler_type,\n", " drop_last_loader=drop_last_loader,\n", " random_seed=random_seed,\n", + " num_workers_loader=num_workers_loader,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", " lr_scheduler=lr_scheduler,\n", diff --git a/neuralforecast/common/_base_model.py b/neuralforecast/common/_base_model.py index fdad6184f..0923c7d1a 100644 --- a/neuralforecast/common/_base_model.py +++ b/neuralforecast/common/_base_model.py @@ -336,6 +336,16 @@ def _fit( dataloader_kwargs = ( self.dataloader_kwargs if self.dataloader_kwargs is not None else {} ) + + if self.num_workers_loader != 0: # value is not at its default + warnings.warn( + "The `num_workers_loader` argument is deprecated and will be removed in a future version. " + "Please provide num_workers through `dataloader_kwargs`, e.g. 
" + f"`dataloader_kwargs={{'num_workers': {self.num_workers_loader}}}`", + category=FutureWarning, + ) + dataloader_kwargs["num_workers"] = self.num_workers_loader + datamodule = datamodule_constructor( dataset=dataset, batch_size=batch_size, diff --git a/neuralforecast/common/_base_multivariate.py b/neuralforecast/common/_base_multivariate.py index a1f8a51ac..0fdc3b94d 100644 --- a/neuralforecast/common/_base_multivariate.py +++ b/neuralforecast/common/_base_multivariate.py @@ -46,6 +46,7 @@ def __init__( futr_exog_list=None, hist_exog_list=None, stat_exog_list=None, + num_workers_loader=0, drop_last_loader=False, random_seed=1, alias=None, @@ -124,6 +125,7 @@ def __init__( self.decompose_forecast = False # DataModule arguments + self.num_workers_loader = num_workers_loader self.dataloader_kwargs = dataloader_kwargs self.drop_last_loader = drop_last_loader # used by on_validation_epoch_end hook diff --git a/neuralforecast/common/_base_recurrent.py b/neuralforecast/common/_base_recurrent.py index 8331a63a0..1ec0322e4 100644 --- a/neuralforecast/common/_base_recurrent.py +++ b/neuralforecast/common/_base_recurrent.py @@ -16,6 +16,9 @@ from ..utils import get_indexer_raise_missing # %% ../../nbs/common.base_recurrent.ipynb 7 +import warnings + + class BaseRecurrent(BaseModel): """Base Recurrent @@ -46,6 +49,7 @@ def __init__( futr_exog_list=None, hist_exog_list=None, stat_exog_list=None, + num_workers_loader=0, drop_last_loader=False, random_seed=1, alias=None, @@ -117,6 +121,7 @@ def __init__( self.test_size = 0 # DataModule arguments + self.num_workers_loader = num_workers_loader self.dataloader_kwargs = dataloader_kwargs self.drop_last_loader = drop_last_loader # used by on_validation_epoch_end hook @@ -562,6 +567,15 @@ def predict(self, dataset, step_size=1, random_seed=None, **data_module_kwargs): else data_module_kwargs ) + if self.num_workers_loader != 0: # value is not at its default + warnings.warn( + "The `num_workers_loader` argument is deprecated and will 
be removed in a future version. " + "Please provide num_workers through `dataloader_kwargs`, e.g. " + f"`dataloader_kwargs={{'num_workers': {self.num_workers_loader}}}`", + category=FutureWarning, + ) + data_module_kwargs["num_workers"] = self.num_workers_loader + if step_size > 1: raise Exception("Recurrent models do not support step_size > 1") diff --git a/neuralforecast/common/_base_windows.py b/neuralforecast/common/_base_windows.py index 74a81b95a..dd4a4c869 100644 --- a/neuralforecast/common/_base_windows.py +++ b/neuralforecast/common/_base_windows.py @@ -49,6 +49,7 @@ def __init__( hist_exog_list=None, stat_exog_list=None, exclude_insample_y=False, + num_workers_loader=0, drop_last_loader=False, random_seed=1, alias=None, @@ -128,6 +129,7 @@ def __init__( self.decompose_forecast = False # DataModule arguments + self.num_workers_loader = num_workers_loader self.dataloader_kwargs = dataloader_kwargs self.drop_last_loader = drop_last_loader # used by on_validation_epoch_end hook diff --git a/neuralforecast/models/autoformer.py b/neuralforecast/models/autoformer.py index 069e3641d..6f74ecc67 100644 --- a/neuralforecast/models/autoformer.py +++ b/neuralforecast/models/autoformer.py @@ -439,6 +439,7 @@ class Autoformer(BaseWindows): `start_padding_enabled`: bool=False, if True, the model will pad the time series with zeros at the beginning, by input size.
`scaler_type`: str='robust', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
`random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
+ `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
`alias`: str, optional, Custom name of the model.
`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
@@ -491,6 +492,7 @@ def __init__( step_size: int = 1, scaler_type: str = "identity", random_seed: int = 1, + num_workers_loader: int = 0, drop_last_loader: bool = False, optimizer=None, optimizer_kwargs=None, @@ -522,6 +524,7 @@ def __init__( scaler_type=scaler_type, drop_last_loader=drop_last_loader, random_seed=random_seed, + num_workers_loader=num_workers_loader, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, lr_scheduler=lr_scheduler, diff --git a/neuralforecast/models/bitcn.py b/neuralforecast/models/bitcn.py index cf4fc91df..53a775838 100644 --- a/neuralforecast/models/bitcn.py +++ b/neuralforecast/models/bitcn.py @@ -113,6 +113,7 @@ class BiTCN(BaseWindows): `step_size`: int=1, step size between each window of temporal data.
`scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
`random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
+ `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
`alias`: str, optional, Custom name of the model.
`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
@@ -158,6 +159,7 @@ def __init__( step_size: int = 1, scaler_type: str = "identity", random_seed: int = 1, + num_workers_loader: int = 0, drop_last_loader: bool = False, optimizer=None, optimizer_kwargs=None, @@ -188,6 +190,7 @@ def __init__( step_size=step_size, scaler_type=scaler_type, random_seed=random_seed, + num_workers_loader=num_workers_loader, drop_last_loader=drop_last_loader, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, diff --git a/neuralforecast/models/deepar.py b/neuralforecast/models/deepar.py index 6b16f51d1..fa54fe5ad 100644 --- a/neuralforecast/models/deepar.py +++ b/neuralforecast/models/deepar.py @@ -84,6 +84,7 @@ class DeepAR(BaseWindows): `step_size`: int=1, step size between each window of temporal data.
`scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
`random_seed`: int, random_seed for pytorch initializer and numpy generators.
+ `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
`alias`: str, optional, Custom name of the model.
`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
@@ -136,6 +137,7 @@ def __init__( step_size: int = 1, scaler_type: str = "identity", random_seed: int = 1, + num_workers_loader: int = 0, drop_last_loader=False, optimizer=None, optimizer_kwargs=None, @@ -185,6 +187,7 @@ def __init__( scaler_type=scaler_type, drop_last_loader=drop_last_loader, random_seed=random_seed, + num_workers_loader=num_workers_loader, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, lr_scheduler=lr_scheduler, diff --git a/neuralforecast/models/deepnpts.py b/neuralforecast/models/deepnpts.py index 3edeb0596..621446595 100644 --- a/neuralforecast/models/deepnpts.py +++ b/neuralforecast/models/deepnpts.py @@ -46,6 +46,7 @@ class DeepNPTS(BaseWindows): `step_size`: int=1, step size between each window of temporal data.
`scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
`random_seed`: int, random_seed for pytorch initializer and numpy generators.
+ `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
`alias`: str, optional, Custom name of the model.
`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
@@ -93,6 +94,7 @@ def __init__( step_size: int = 1, scaler_type: str = "standard", random_seed: int = 1, + num_workers_loader: int = 0, drop_last_loader=False, optimizer=None, optimizer_kwargs=None, @@ -139,6 +141,7 @@ def __init__( scaler_type=scaler_type, drop_last_loader=drop_last_loader, random_seed=random_seed, + num_workers_loader=num_workers_loader, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, lr_scheduler=lr_scheduler, diff --git a/neuralforecast/models/dilated_rnn.py b/neuralforecast/models/dilated_rnn.py index 96094c961..52722f96c 100644 --- a/neuralforecast/models/dilated_rnn.py +++ b/neuralforecast/models/dilated_rnn.py @@ -314,6 +314,7 @@ class DilatedRNN(BaseRecurrent): `step_size`: int=1, step size between each window of temporal data.
`scaler_type`: str='robust', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
`random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
+ `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
`alias`: str, optional, Custom name of the model.
`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
@@ -356,6 +357,7 @@ def __init__( step_size: int = 1, scaler_type: str = "robust", random_seed: int = 1, + num_workers_loader: int = 0, drop_last_loader: bool = False, optimizer=None, optimizer_kwargs=None, @@ -383,6 +385,7 @@ def __init__( stat_exog_list=stat_exog_list, drop_last_loader=drop_last_loader, random_seed=random_seed, + num_workers_loader=num_workers_loader, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, lr_scheduler=lr_scheduler, diff --git a/neuralforecast/models/dlinear.py b/neuralforecast/models/dlinear.py index 3af5f11c0..cdb080fbe 100644 --- a/neuralforecast/models/dlinear.py +++ b/neuralforecast/models/dlinear.py @@ -72,6 +72,7 @@ class DLinear(BaseWindows): `start_padding_enabled`: bool=False, if True, the model will pad the time series with zeros at the beginning, by input size.
`scaler_type`: str='robust', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
`random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
+ `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
`alias`: str, optional, Custom name of the model.
`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
@@ -115,6 +116,7 @@ def __init__( step_size: int = 1, scaler_type: str = "identity", random_seed: int = 1, + num_workers_loader: int = 0, drop_last_loader: bool = False, optimizer=None, optimizer_kwargs=None, @@ -146,6 +148,7 @@ def __init__( scaler_type=scaler_type, drop_last_loader=drop_last_loader, random_seed=random_seed, + num_workers_loader=num_workers_loader, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, lr_scheduler=lr_scheduler, diff --git a/neuralforecast/models/fedformer.py b/neuralforecast/models/fedformer.py index 7cfe3c5a6..1692fa953 100644 --- a/neuralforecast/models/fedformer.py +++ b/neuralforecast/models/fedformer.py @@ -437,6 +437,7 @@ class FEDformer(BaseWindows): `start_padding_enabled`: bool=False, if True, the model will pad the time series with zeros at the beginning, by input size.
`scaler_type`: str='robust', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
`random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
+ `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
`alias`: str, optional, Custom name of the model.
`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
@@ -488,6 +489,7 @@ def __init__( step_size: int = 1, scaler_type: str = "identity", random_seed: int = 1, + num_workers_loader: int = 0, drop_last_loader: bool = False, optimizer=None, optimizer_kwargs=None, @@ -518,6 +520,7 @@ def __init__( scaler_type=scaler_type, drop_last_loader=drop_last_loader, random_seed=random_seed, + num_workers_loader=num_workers_loader, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, lr_scheduler=lr_scheduler, diff --git a/neuralforecast/models/gru.py b/neuralforecast/models/gru.py index 5fa61d0b5..795eeb1b9 100644 --- a/neuralforecast/models/gru.py +++ b/neuralforecast/models/gru.py @@ -48,6 +48,7 @@ class GRU(BaseRecurrent): `valid_batch_size`: int=None, number of different series in each validation and test batch.
`scaler_type`: str='robust', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
`random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
+ `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
`alias`: str, optional, Custom name of the model.
`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
@@ -91,6 +92,7 @@ def __init__( valid_batch_size: Optional[int] = None, scaler_type: str = "robust", random_seed=1, + num_workers_loader=0, drop_last_loader=False, optimizer=None, optimizer_kwargs=None, @@ -118,6 +120,7 @@ def __init__( stat_exog_list=stat_exog_list, drop_last_loader=drop_last_loader, random_seed=random_seed, + num_workers_loader=num_workers_loader, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, lr_scheduler=lr_scheduler, diff --git a/neuralforecast/models/informer.py b/neuralforecast/models/informer.py index cb4ff2622..ff917d19b 100644 --- a/neuralforecast/models/informer.py +++ b/neuralforecast/models/informer.py @@ -223,6 +223,7 @@ class Informer(BaseWindows): `start_padding_enabled`: bool=False, if True, the model will pad the time series with zeros at the beginning, by input size.
`scaler_type`: str='robust', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
`random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
+ `num_workers_loader`: int=0, workers to be used by `TimeSeriesDataLoader`.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
`alias`: str, optional, Custom name of the model.
`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
@@ -275,6 +276,7 @@ def __init__( step_size: int = 1, scaler_type: str = "identity", random_seed: int = 1, + num_workers_loader: int = 0, drop_last_loader: bool = False, optimizer=None, optimizer_kwargs=None, @@ -306,6 +308,7 @@ def __init__( scaler_type=scaler_type, drop_last_loader=drop_last_loader, random_seed=random_seed, + num_workers_loader=num_workers_loader, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, lr_scheduler=lr_scheduler, diff --git a/neuralforecast/models/itransformer.py b/neuralforecast/models/itransformer.py index 121eac2b5..9e577a71d 100644 --- a/neuralforecast/models/itransformer.py +++ b/neuralforecast/models/itransformer.py @@ -131,6 +131,7 @@ class iTransformer(BaseMultivariate): `step_size`: int=1, step size between each window of temporal data.
`scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
`random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
+ `num_workers_loader`: int=0, workers to be used by `TimeSeriesDataLoader`.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
`alias`: str, optional, Custom name of the model.
`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
@@ -177,6 +178,7 @@ def __init__( step_size: int = 1, scaler_type: str = "identity", random_seed: int = 1, + num_workers_loader: int = 0, drop_last_loader: bool = False, optimizer=None, optimizer_kwargs=None, @@ -204,6 +206,7 @@ def __init__( step_size=step_size, scaler_type=scaler_type, random_seed=random_seed, + num_workers_loader=num_workers_loader, drop_last_loader=drop_last_loader, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, diff --git a/neuralforecast/models/kan.py b/neuralforecast/models/kan.py index e442fdbd4..035262555 100644 --- a/neuralforecast/models/kan.py +++ b/neuralforecast/models/kan.py @@ -281,6 +281,7 @@ class KAN(BaseWindows): `step_size`: int=1, step size between each window of temporal data.
`scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
`random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
+ `num_workers_loader`: int=0, workers to be used by `TimeSeriesDataLoader`.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
`alias`: str, optional, Custom name of the model.
`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
@@ -331,6 +332,7 @@ def __init__( step_size: int = 1, scaler_type: str = "identity", random_seed: int = 1, + num_workers_loader: int = 0, drop_last_loader: bool = False, optimizer=None, optimizer_kwargs=None, @@ -362,6 +364,7 @@ def __init__( scaler_type=scaler_type, drop_last_loader=drop_last_loader, random_seed=random_seed, + num_workers_loader=num_workers_loader, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, dataloader_kwargs=dataloader_kwargs, diff --git a/neuralforecast/models/lstm.py b/neuralforecast/models/lstm.py index bb8906b8d..976c30c22 100644 --- a/neuralforecast/models/lstm.py +++ b/neuralforecast/models/lstm.py @@ -47,6 +47,7 @@ class LSTM(BaseRecurrent): `valid_batch_size`: int=None, number of different series in each validation and test batch.
`scaler_type`: str='robust', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
`random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
+ `num_workers_loader`: int=0, workers to be used by `TimeSeriesDataLoader`.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
`alias`: str, optional, Custom name of the model.
`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
@@ -89,6 +90,7 @@ def __init__( valid_batch_size: Optional[int] = None, scaler_type: str = "robust", random_seed=1, + num_workers_loader=0, drop_last_loader=False, optimizer=None, optimizer_kwargs=None, @@ -116,6 +118,7 @@ def __init__( stat_exog_list=stat_exog_list, drop_last_loader=drop_last_loader, random_seed=random_seed, + num_workers_loader=num_workers_loader, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, lr_scheduler=lr_scheduler, diff --git a/neuralforecast/models/mlp.py b/neuralforecast/models/mlp.py index 535c41424..c49cdf4d6 100644 --- a/neuralforecast/models/mlp.py +++ b/neuralforecast/models/mlp.py @@ -46,6 +46,7 @@ class MLP(BaseWindows): `step_size`: int=1, step size between each window of temporal data.
`scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
`random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
+ `num_workers_loader`: int=0, workers to be used by `TimeSeriesDataLoader`.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
`alias`: str, optional, Custom name of the model.
`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
@@ -87,6 +88,7 @@ def __init__( step_size: int = 1, scaler_type: str = "identity", random_seed: int = 1, + num_workers_loader: int = 0, drop_last_loader: bool = False, optimizer=None, optimizer_kwargs=None, @@ -120,6 +122,7 @@ def __init__( scaler_type=scaler_type, drop_last_loader=drop_last_loader, random_seed=random_seed, + num_workers_loader=num_workers_loader, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, lr_scheduler=lr_scheduler, diff --git a/neuralforecast/models/mlpmultivariate.py b/neuralforecast/models/mlpmultivariate.py index f03ec7222..2e3b3a271 100644 --- a/neuralforecast/models/mlpmultivariate.py +++ b/neuralforecast/models/mlpmultivariate.py @@ -40,6 +40,7 @@ class MLPMultivariate(BaseMultivariate): `step_size`: int=1, step size between each window of temporal data.
`scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
`random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
+ `num_workers_loader`: int=0, workers to be used by `TimeSeriesDataLoader`.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
`alias`: str, optional, Custom name of the model.
`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
@@ -77,6 +78,7 @@ def __init__( step_size: int = 1, scaler_type: str = "identity", random_seed: int = 1, + num_workers_loader: int = 0, drop_last_loader: bool = False, optimizer=None, optimizer_kwargs=None, @@ -106,6 +108,7 @@ def __init__( scaler_type=scaler_type, drop_last_loader=drop_last_loader, random_seed=random_seed, + num_workers_loader=num_workers_loader, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, lr_scheduler=lr_scheduler, diff --git a/neuralforecast/models/nbeats.py b/neuralforecast/models/nbeats.py index 1fb4f07b8..c2f35d8fb 100644 --- a/neuralforecast/models/nbeats.py +++ b/neuralforecast/models/nbeats.py @@ -225,6 +225,7 @@ class NBEATS(BaseWindows): `step_size`: int=1, step size between each window of temporal data.
`scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
`random_seed`: int, random_seed for pytorch initializer and numpy generators.
+ `num_workers_loader`: int=0, workers to be used by `TimeSeriesDataLoader`.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
`alias`: str, optional, Custom name of the model.
`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
@@ -272,6 +273,7 @@ def __init__( step_size: int = 1, scaler_type: str = "identity", random_seed: int = 1, + num_workers_loader: int = 0, drop_last_loader: bool = False, optimizer=None, optimizer_kwargs=None, @@ -307,6 +309,7 @@ def __init__( scaler_type=scaler_type, drop_last_loader=drop_last_loader, random_seed=random_seed, + num_workers_loader=num_workers_loader, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, lr_scheduler=lr_scheduler, diff --git a/neuralforecast/models/nbeatsx.py b/neuralforecast/models/nbeatsx.py index 10e37f608..b653ad22a 100644 --- a/neuralforecast/models/nbeatsx.py +++ b/neuralforecast/models/nbeatsx.py @@ -312,6 +312,7 @@ class NBEATSx(BaseWindows): `step_size`: int=1, step size between each window of temporal data.
`scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
`random_seed`: int, random seed initialization for replicability.
+ `num_workers_loader`: int=0, workers to be used by `TimeSeriesDataLoader`.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
`alias`: str, optional, Custom name of the model.
`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
@@ -363,6 +364,7 @@ def __init__( step_size: int = 1, scaler_type: str = "identity", random_seed: int = 1, + num_workers_loader: int = 0, drop_last_loader: bool = False, optimizer=None, optimizer_kwargs=None, @@ -401,6 +403,7 @@ def __init__( scaler_type=scaler_type, drop_last_loader=drop_last_loader, random_seed=random_seed, + num_workers_loader=num_workers_loader, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, lr_scheduler=lr_scheduler, diff --git a/neuralforecast/models/nhits.py b/neuralforecast/models/nhits.py index f16db81a3..e4ea36cc8 100644 --- a/neuralforecast/models/nhits.py +++ b/neuralforecast/models/nhits.py @@ -223,6 +223,7 @@ class NHITS(BaseWindows): `step_size`: int=1, step size between each window of temporal data.
`scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
`random_seed`: int, random_seed for pytorch initializer and numpy generators.
+ `num_workers_loader`: int=0, workers to be used by `TimeSeriesDataLoader`.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
`alias`: str, optional, Custom name of the model.
`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
@@ -276,6 +277,7 @@ def __init__( step_size: int = 1, scaler_type: str = "identity", random_seed: int = 1, + num_workers_loader: int = 0, drop_last_loader=False, optimizer=None, optimizer_kwargs=None, @@ -309,6 +311,7 @@ def __init__( scaler_type=scaler_type, drop_last_loader=drop_last_loader, random_seed=random_seed, + num_workers_loader=num_workers_loader, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, lr_scheduler=lr_scheduler, diff --git a/neuralforecast/models/nlinear.py b/neuralforecast/models/nlinear.py index 4bad929b1..ce436add8 100644 --- a/neuralforecast/models/nlinear.py +++ b/neuralforecast/models/nlinear.py @@ -36,6 +36,7 @@ class NLinear(BaseWindows): `start_padding_enabled`: bool=False, if True, the model will pad the time series with zeros at the beginning, by input size.
`scaler_type`: str='robust', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
`random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
+ `num_workers_loader`: int=0, workers to be used by `TimeSeriesDataLoader`.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
`alias`: str, optional, Custom name of the model.
`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
@@ -78,6 +79,7 @@ def __init__( step_size: int = 1, scaler_type: str = "identity", random_seed: int = 1, + num_workers_loader: int = 0, drop_last_loader: bool = False, optimizer=None, optimizer_kwargs=None, @@ -109,6 +111,7 @@ def __init__( scaler_type=scaler_type, drop_last_loader=drop_last_loader, random_seed=random_seed, + num_workers_loader=num_workers_loader, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, lr_scheduler=lr_scheduler, diff --git a/neuralforecast/models/patchtst.py b/neuralforecast/models/patchtst.py index 25770b71c..a9ce9271f 100644 --- a/neuralforecast/models/patchtst.py +++ b/neuralforecast/models/patchtst.py @@ -833,6 +833,7 @@ class PatchTST(BaseWindows): `step_size`: int=1, step size between each window of temporal data.
`scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
`random_seed`: int, random_seed for pytorch initializer and numpy generators.
+ `num_workers_loader`: int=0, workers to be used by `TimeSeriesDataLoader`.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
`alias`: str, optional, Custom name of the model.
`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
@@ -892,6 +893,7 @@ def __init__( step_size: int = 1, scaler_type: str = "identity", random_seed: int = 1, + num_workers_loader: int = 0, drop_last_loader: bool = False, optimizer=None, optimizer_kwargs=None, @@ -923,6 +925,7 @@ def __init__( scaler_type=scaler_type, drop_last_loader=drop_last_loader, random_seed=random_seed, + num_workers_loader=num_workers_loader, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, lr_scheduler=lr_scheduler, diff --git a/neuralforecast/models/rmok.py b/neuralforecast/models/rmok.py index fc66483d6..35db80aca 100644 --- a/neuralforecast/models/rmok.py +++ b/neuralforecast/models/rmok.py @@ -281,6 +281,7 @@ class RMoK(BaseMultivariate): `step_size`: int=1, step size between each window of temporal data.
`scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
`random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
+ `num_workers_loader`: int=0, workers to be used by `TimeSeriesDataLoader`.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
`alias`: str, optional, Custom name of the model.
`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
@@ -324,6 +325,7 @@ def __init__( step_size: int = 1, scaler_type: str = "identity", random_seed: int = 1, + num_workers_loader: int = 0, drop_last_loader: bool = False, optimizer=None, optimizer_kwargs=None, @@ -351,6 +353,7 @@ def __init__( step_size=step_size, scaler_type=scaler_type, random_seed=random_seed, + num_workers_loader=num_workers_loader, drop_last_loader=drop_last_loader, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, diff --git a/neuralforecast/models/rnn.py b/neuralforecast/models/rnn.py index d3f8b4fff..a2b83cc9b 100644 --- a/neuralforecast/models/rnn.py +++ b/neuralforecast/models/rnn.py @@ -48,6 +48,7 @@ class RNN(BaseRecurrent): `valid_batch_size`: int=None, number of different series in each validation and test batch.
`scaler_type`: str='robust', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
`random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
+ `num_workers_loader`: int=0, workers to be used by `TimeSeriesDataLoader`.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
`optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.
@@ -92,6 +93,7 @@ def __init__( valid_batch_size: Optional[int] = None, scaler_type: str = "robust", random_seed=1, + num_workers_loader=0, drop_last_loader=False, optimizer=None, optimizer_kwargs=None, @@ -119,6 +121,7 @@ def __init__( stat_exog_list=stat_exog_list, drop_last_loader=drop_last_loader, random_seed=random_seed, + num_workers_loader=num_workers_loader, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, lr_scheduler=lr_scheduler, diff --git a/neuralforecast/models/softs.py b/neuralforecast/models/softs.py index a40f32beb..cb425200a 100644 --- a/neuralforecast/models/softs.py +++ b/neuralforecast/models/softs.py @@ -106,6 +106,7 @@ class SOFTS(BaseMultivariate): `step_size`: int=1, step size between each window of temporal data.
`scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
`random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
+ `num_workers_loader`: int=0, workers to be used by `TimeSeriesDataLoader`.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
`alias`: str, optional, Custom name of the model.
`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
@@ -150,6 +151,7 @@ def __init__( step_size: int = 1, scaler_type: str = "identity", random_seed: int = 1, + num_workers_loader: int = 0, drop_last_loader: bool = False, optimizer=None, optimizer_kwargs=None, @@ -177,6 +179,7 @@ def __init__( step_size=step_size, scaler_type=scaler_type, random_seed=random_seed, + num_workers_loader=num_workers_loader, drop_last_loader=drop_last_loader, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, diff --git a/neuralforecast/models/stemgnn.py b/neuralforecast/models/stemgnn.py index 69cdc4ef5..228618d9e 100644 --- a/neuralforecast/models/stemgnn.py +++ b/neuralforecast/models/stemgnn.py @@ -166,6 +166,7 @@ class StemGNN(BaseMultivariate): `step_size`: int=1, step size between each window of temporal data.
`scaler_type`: str='robust', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
`random_seed`: int, random_seed for pytorch initializer and numpy generators.
+ `num_workers_loader`: int=0, workers to be used by `TimeSeriesDataLoader`.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
`alias`: str, optional, Custom name of the model.
`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
@@ -205,6 +206,7 @@ def __init__( step_size: int = 1, scaler_type: str = "robust", random_seed: int = 1, + num_workers_loader: int = 0, drop_last_loader=False, optimizer=None, optimizer_kwargs=None, @@ -234,6 +236,7 @@ def __init__( scaler_type=scaler_type, drop_last_loader=drop_last_loader, random_seed=random_seed, + num_workers_loader=num_workers_loader, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, lr_scheduler=lr_scheduler, diff --git a/neuralforecast/models/tcn.py b/neuralforecast/models/tcn.py index 70dd9c37f..ded621b62 100644 --- a/neuralforecast/models/tcn.py +++ b/neuralforecast/models/tcn.py @@ -44,6 +44,7 @@ class TCN(BaseRecurrent): `val_check_steps`: int=100, Number of training steps between every validation loss check.
`batch_size`: int=32, number of differentseries in each batch.
`scaler_type`: str='robust', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
`random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
+ `num_workers_loader`: int=0, workers to be used by `TimeSeriesDataLoader`.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
`alias`: str, optional, Custom name of the model.
`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
@@ -86,6 +87,7 @@ def __init__( valid_batch_size: Optional[int] = None, scaler_type: str = "robust", random_seed: int = 1, + num_workers_loader: int = 0, drop_last_loader=False, optimizer=None, optimizer_kwargs=None, @@ -113,6 +115,7 @@ def __init__( stat_exog_list=stat_exog_list, drop_last_loader=drop_last_loader, random_seed=random_seed, + num_workers_loader=num_workers_loader, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, lr_scheduler=lr_scheduler, diff --git a/neuralforecast/models/tft.py b/neuralforecast/models/tft.py index 616496fc0..6c17c2aa6 100644 --- a/neuralforecast/models/tft.py +++ b/neuralforecast/models/tft.py @@ -454,6 +454,7 @@ class TFT(BaseWindows): `step_size`: int=1, step size between each window of temporal data.
`scaler_type`: str='robust', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
`random_seed`: int, random seed initialization for replicability.
+ `num_workers_loader`: int=0, workers to be used by `TimeSeriesDataLoader`.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
`alias`: str, optional, Custom name of the model.
`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
@@ -503,6 +504,7 @@ def __init__( scaler_type: str = "robust", drop_last_loader=False, random_seed: int = 1, + num_workers_loader: int = 0, optimizer=None, optimizer_kwargs=None, lr_scheduler=None, @@ -534,6 +536,7 @@ def __init__( scaler_type=scaler_type, drop_last_loader=drop_last_loader, random_seed=random_seed, + num_workers_loader=num_workers_loader, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, lr_scheduler=lr_scheduler, diff --git a/neuralforecast/models/tide.py b/neuralforecast/models/tide.py index 1f8f7144f..ec98c2b13 100644 --- a/neuralforecast/models/tide.py +++ b/neuralforecast/models/tide.py @@ -78,6 +78,7 @@ class TiDE(BaseWindows): `step_size`: int=1, step size between each window of temporal data.
`scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
`random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
+ `num_workers_loader`: int=0, workers to be used by `TimeSeriesDataLoader`.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
`alias`: str, optional, Custom name of the model.
`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
@@ -129,6 +130,7 @@ def __init__( step_size: int = 1, scaler_type: str = "identity", random_seed: int = 1, + num_workers_loader: int = 0, drop_last_loader: bool = False, optimizer=None, optimizer_kwargs=None, @@ -161,6 +163,7 @@ def __init__( step_size=step_size, scaler_type=scaler_type, random_seed=random_seed, + num_workers_loader=num_workers_loader, drop_last_loader=drop_last_loader, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, diff --git a/neuralforecast/models/timellm.py b/neuralforecast/models/timellm.py index bec5fb453..c0ef750c8 100644 --- a/neuralforecast/models/timellm.py +++ b/neuralforecast/models/timellm.py @@ -211,6 +211,7 @@ class TimeLLM(BaseWindows): `step_size`: int=1, step size between each window of temporal data.
`scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
`random_seed`: int, random_seed for pytorch initializer and numpy generators.
+ `num_workers_loader`: int=0, workers to be used by `TimeSeriesDataLoader`.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
`alias`: str, optional, Custom name of the model.
`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
@@ -270,6 +271,7 @@ def __init__( scaler_type: str = "identity", drop_last_loader: bool = False, random_seed: int = 1, + num_workers_loader: int = 0, optimizer=None, optimizer_kwargs=None, lr_scheduler=None, @@ -299,6 +301,7 @@ def __init__( scaler_type=scaler_type, drop_last_loader=drop_last_loader, random_seed=random_seed, + num_workers_loader=num_workers_loader, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, lr_scheduler=lr_scheduler, diff --git a/neuralforecast/models/timemixer.py b/neuralforecast/models/timemixer.py index cdaea20bc..5585539bd 100644 --- a/neuralforecast/models/timemixer.py +++ b/neuralforecast/models/timemixer.py @@ -282,6 +282,7 @@ class TimeMixer(BaseMultivariate): `step_size`: int=1, step size between each window of temporal data.
`scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
`random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
+ `num_workers_loader`: int=0, workers to be used by `TimeSeriesDataLoader`.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
`alias`: str, optional, Custom name of the model.
`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
@@ -333,6 +334,7 @@ def __init__( step_size: int = 1, scaler_type: str = "identity", random_seed: int = 1, + num_workers_loader: int = 0, drop_last_loader: bool = False, optimizer=None, optimizer_kwargs=None, @@ -360,6 +362,7 @@ def __init__( step_size=step_size, scaler_type=scaler_type, random_seed=random_seed, + num_workers_loader=num_workers_loader, drop_last_loader=drop_last_loader, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, diff --git a/neuralforecast/models/timesnet.py b/neuralforecast/models/timesnet.py index 9089a8796..cd04a79da 100644 --- a/neuralforecast/models/timesnet.py +++ b/neuralforecast/models/timesnet.py @@ -178,9 +178,11 @@ class TimesNet(BaseWindows): Type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
random_seed : int (default=1) Random_seed for pytorch initializer and numpy generators. + num_workers_loader : int (default=0) + Workers to be used by `TimeSeriesDataLoader`. drop_last_loader : bool (default=False) If True `TimeSeriesDataLoader` drops last non-full batch. - `optimizer`: Subclass of 'torch.optim.Optimizer', optional (default=None) + `optimizer`: Subclass of 'torch.optim.Optimizer', optional (default=None) User specified optimizer instead of the default choice (Adam). `optimizer_kwargs`: dict, optional (defualt=None) List of parameters used by the user specified `optimizer`. @@ -231,6 +233,7 @@ def __init__( step_size: int = 1, scaler_type: str = "standard", random_seed: int = 1, + num_workers_loader: int = 0, drop_last_loader: bool = False, optimizer=None, optimizer_kwargs=None, @@ -262,6 +265,7 @@ def __init__( scaler_type=scaler_type, drop_last_loader=drop_last_loader, random_seed=random_seed, + num_workers_loader=num_workers_loader, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, lr_scheduler=lr_scheduler, diff --git a/neuralforecast/models/tsmixer.py b/neuralforecast/models/tsmixer.py index 17fae38be..0d68e1e4c 100644 --- a/neuralforecast/models/tsmixer.py +++ b/neuralforecast/models/tsmixer.py @@ -157,6 +157,7 @@ class TSMixer(BaseMultivariate): `step_size`: int=1, step size between each window of temporal data.
`scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
`random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
+ `num_workers_loader`: int=0, workers to be used by `TimeSeriesDataLoader`.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
`alias`: str, optional, Custom name of the model.
`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
@@ -200,6 +201,7 @@ def __init__( step_size: int = 1, scaler_type: str = "identity", random_seed: int = 1, + num_workers_loader: int = 0, drop_last_loader: bool = False, optimizer=None, optimizer_kwargs=None, @@ -228,6 +230,7 @@ def __init__( step_size=step_size, scaler_type=scaler_type, random_seed=random_seed, + num_workers_loader=num_workers_loader, drop_last_loader=drop_last_loader, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, diff --git a/neuralforecast/models/tsmixerx.py b/neuralforecast/models/tsmixerx.py index 97747bbb4..24897d442 100644 --- a/neuralforecast/models/tsmixerx.py +++ b/neuralforecast/models/tsmixerx.py @@ -185,6 +185,7 @@ class TSMixerx(BaseMultivariate): `step_size`: int=1, step size between each window of temporal data.
`scaler_type`: str='identity', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
`random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
+ `num_workers_loader`: int=0, workers to be used by `TimeSeriesDataLoader`.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
`alias`: str, optional, Custom name of the model.
`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
@@ -228,6 +229,7 @@ def __init__( step_size: int = 1, scaler_type: str = "identity", random_seed: int = 1, + num_workers_loader: int = 0, drop_last_loader: bool = False, optimizer=None, optimizer_kwargs=None, @@ -256,6 +258,7 @@ def __init__( step_size=step_size, scaler_type=scaler_type, random_seed=random_seed, + num_workers_loader=num_workers_loader, drop_last_loader=drop_last_loader, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, diff --git a/neuralforecast/models/vanillatransformer.py b/neuralforecast/models/vanillatransformer.py index e38c03fc9..b9d987a17 100644 --- a/neuralforecast/models/vanillatransformer.py +++ b/neuralforecast/models/vanillatransformer.py @@ -114,6 +114,7 @@ class VanillaTransformer(BaseWindows): `start_padding_enabled`: bool=False, if True, the model will pad the time series with zeros at the beginning, by input size.
`scaler_type`: str='robust', type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
`random_seed`: int=1, random_seed for pytorch initializer and numpy generators.
+ `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.
`drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.
`alias`: str, optional, Custom name of the model.
`optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).
@@ -163,6 +164,7 @@ def __init__( step_size: int = 1, scaler_type: str = "identity", random_seed: int = 1, + num_workers_loader: int = 0, drop_last_loader: bool = False, optimizer=None, optimizer_kwargs=None, @@ -193,6 +195,7 @@ def __init__( scaler_type=scaler_type, drop_last_loader=drop_last_loader, random_seed=random_seed, + num_workers_loader=num_workers_loader, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, lr_scheduler=lr_scheduler, From b5861d65baafd7b0adbf03edf7d56e1d7ebe0e31 Mon Sep 17 00:00:00 2001 From: jasminerienecker Date: Thu, 24 Oct 2024 15:24:33 +1100 Subject: [PATCH 08/11] slight reformatting --- nbs/models.autoformer.ipynb | 2 +- nbs/models.deepar.ipynb | 4 ++-- nbs/models.deepnpts.ipynb | 4 ++-- nbs/models.dilated_rnn.ipynb | 2 +- nbs/models.dlinear.ipynb | 2 +- nbs/models.fedformer.ipynb | 2 +- nbs/models.gru.ipynb | 2 +- nbs/models.informer.ipynb | 2 +- nbs/models.itransformer.ipynb | 1 + nbs/models.kan.ipynb | 2 +- nbs/models.lstm.ipynb | 2 +- nbs/models.mlp.ipynb | 2 +- nbs/models.mlpmultivariate.ipynb | 2 +- nbs/models.nbeats.ipynb | 2 +- nbs/models.nbeatsx.ipynb | 2 +- nbs/models.nhits.ipynb | 4 ++-- nbs/models.nlinear.ipynb | 2 +- nbs/models.patchtst.ipynb | 2 +- nbs/models.rmok.ipynb | 1 + nbs/models.rnn.ipynb | 2 +- nbs/models.softs.ipynb | 1 + nbs/models.stemgnn.ipynb | 4 ++-- nbs/models.tcn.ipynb | 4 ++-- nbs/models.tft.ipynb | 4 ++-- nbs/models.timellm.ipynb | 4 ++-- nbs/models.timesnet.ipynb | 6 +++--- nbs/models.vanillatransformer.ipynb | 2 +- neuralforecast/models/autoformer.py | 2 +- neuralforecast/models/deepar.py | 4 ++-- neuralforecast/models/deepnpts.py | 4 ++-- neuralforecast/models/dilated_rnn.py | 2 +- neuralforecast/models/dlinear.py | 2 +- neuralforecast/models/fedformer.py | 2 +- neuralforecast/models/gru.py | 2 +- neuralforecast/models/informer.py | 2 +- neuralforecast/models/kan.py | 2 +- neuralforecast/models/lstm.py | 2 +- neuralforecast/models/mlp.py | 2 +- neuralforecast/models/mlpmultivariate.py | 2 +- 
neuralforecast/models/nbeats.py | 2 +- neuralforecast/models/nbeatsx.py | 2 +- neuralforecast/models/nhits.py | 4 ++-- neuralforecast/models/nlinear.py | 2 +- neuralforecast/models/patchtst.py | 2 +- neuralforecast/models/rnn.py | 2 +- neuralforecast/models/stemgnn.py | 4 ++-- neuralforecast/models/tcn.py | 4 ++-- neuralforecast/models/tft.py | 4 ++-- neuralforecast/models/timellm.py | 4 ++-- neuralforecast/models/timesnet.py | 4 ++-- neuralforecast/models/vanillatransformer.py | 2 +- 51 files changed, 68 insertions(+), 65 deletions(-) diff --git a/nbs/models.autoformer.ipynb b/nbs/models.autoformer.ipynb index e16506fcd..9c6567f2e 100644 --- a/nbs/models.autoformer.ipynb +++ b/nbs/models.autoformer.ipynb @@ -534,9 +534,9 @@ " start_padding_enabled = start_padding_enabled,\n", " step_size=step_size,\n", " scaler_type=scaler_type,\n", + " num_workers_loader=num_workers_loader,\n", " drop_last_loader=drop_last_loader,\n", " random_seed=random_seed,\n", - " num_workers_loader=num_workers_loader,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", " lr_scheduler=lr_scheduler,\n", diff --git a/nbs/models.deepar.ipynb b/nbs/models.deepar.ipynb index 777563823..c25e27bf9 100644 --- a/nbs/models.deepar.ipynb +++ b/nbs/models.deepar.ipynb @@ -229,7 +229,7 @@ " step_size: int = 1,\n", " scaler_type: str = 'identity',\n", " random_seed: int = 1,\n", - " num_workers_loader: int = 0,\n", + " num_workers_loader = 0,\n", " drop_last_loader = False,\n", " optimizer = None,\n", " optimizer_kwargs = None,\n", @@ -271,9 +271,9 @@ " start_padding_enabled=start_padding_enabled,\n", " step_size=step_size,\n", " scaler_type=scaler_type,\n", + " num_workers_loader=num_workers_loader,\n", " drop_last_loader=drop_last_loader,\n", " random_seed=random_seed,\n", - " num_workers_loader=num_workers_loader,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", " lr_scheduler=lr_scheduler,\n", diff --git a/nbs/models.deepnpts.ipynb b/nbs/models.deepnpts.ipynb 
index 7b44f7ef5..4f5e7ee9f 100644 --- a/nbs/models.deepnpts.ipynb +++ b/nbs/models.deepnpts.ipynb @@ -164,7 +164,7 @@ " step_size: int = 1,\n", " scaler_type: str = 'standard',\n", " random_seed: int = 1,\n", - " num_workers_loader: int = 0,\n", + " num_workers_loader = 0,\n", " drop_last_loader = False,\n", " optimizer = None,\n", " optimizer_kwargs = None,\n", @@ -203,9 +203,9 @@ " start_padding_enabled=start_padding_enabled,\n", " step_size=step_size,\n", " scaler_type=scaler_type,\n", + " num_workers_loader=num_workers_loader,\n", " drop_last_loader=drop_last_loader,\n", " random_seed=random_seed,\n", - " num_workers_loader=num_workers_loader,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", " lr_scheduler=lr_scheduler,\n", diff --git a/nbs/models.dilated_rnn.ipynb b/nbs/models.dilated_rnn.ipynb index 25ac95d3a..4b3bd374f 100644 --- a/nbs/models.dilated_rnn.ipynb +++ b/nbs/models.dilated_rnn.ipynb @@ -453,9 +453,9 @@ " futr_exog_list=futr_exog_list,\n", " hist_exog_list=hist_exog_list,\n", " stat_exog_list=stat_exog_list,\n", + " num_workers_loader=num_workers_loader,\n", " drop_last_loader=drop_last_loader,\n", " random_seed=random_seed,\n", - " num_workers_loader=num_workers_loader,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", " lr_scheduler=lr_scheduler,\n", diff --git a/nbs/models.dlinear.ipynb b/nbs/models.dlinear.ipynb index f94d0cc7e..ea1a38a43 100644 --- a/nbs/models.dlinear.ipynb +++ b/nbs/models.dlinear.ipynb @@ -229,9 +229,9 @@ " start_padding_enabled = start_padding_enabled,\n", " step_size=step_size,\n", " scaler_type=scaler_type,\n", + " num_workers_loader=num_workers_loader,\n", " drop_last_loader=drop_last_loader,\n", " random_seed=random_seed,\n", - " num_workers_loader=num_workers_loader,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", " lr_scheduler=lr_scheduler,\n", diff --git a/nbs/models.fedformer.ipynb b/nbs/models.fedformer.ipynb index a6cb7b4dc..2268c058d 100644 
--- a/nbs/models.fedformer.ipynb +++ b/nbs/models.fedformer.ipynb @@ -525,9 +525,9 @@ " start_padding_enabled=start_padding_enabled,\n", " step_size=step_size,\n", " scaler_type=scaler_type,\n", + " num_workers_loader=num_workers_loader,\n", " drop_last_loader=drop_last_loader,\n", " random_seed=random_seed,\n", - " num_workers_loader=num_workers_loader,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", " lr_scheduler=lr_scheduler,\n", diff --git a/nbs/models.gru.ipynb b/nbs/models.gru.ipynb index 099ae9b55..7cb14f21c 100644 --- a/nbs/models.gru.ipynb +++ b/nbs/models.gru.ipynb @@ -188,9 +188,9 @@ " futr_exog_list=futr_exog_list,\n", " hist_exog_list=hist_exog_list,\n", " stat_exog_list=stat_exog_list,\n", + " num_workers_loader=num_workers_loader,\n", " drop_last_loader=drop_last_loader,\n", " random_seed=random_seed,\n", - " num_workers_loader=num_workers_loader,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", " lr_scheduler=lr_scheduler,\n", diff --git a/nbs/models.informer.ipynb b/nbs/models.informer.ipynb index e3ec5ce18..c8e30137c 100644 --- a/nbs/models.informer.ipynb +++ b/nbs/models.informer.ipynb @@ -382,9 +382,9 @@ " start_padding_enabled=start_padding_enabled,\n", " step_size=step_size,\n", " scaler_type=scaler_type,\n", + " num_workers_loader=num_workers_loader,\n", " drop_last_loader=drop_last_loader,\n", " random_seed=random_seed,\n", - " num_workers_loader=num_workers_loader,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", " lr_scheduler=lr_scheduler,\n", diff --git a/nbs/models.itransformer.ipynb b/nbs/models.itransformer.ipynb index 694d19f8e..5e134cfa0 100644 --- a/nbs/models.itransformer.ipynb +++ b/nbs/models.itransformer.ipynb @@ -194,6 +194,7 @@ "outputs": [], "source": [ "#| export\n", + "\n", "class iTransformer(BaseMultivariate):\n", "\n", " \"\"\" iTransformer\n", diff --git a/nbs/models.kan.ipynb b/nbs/models.kan.ipynb index 9ccb8460e..ac7cc5e2b 100644 --- 
a/nbs/models.kan.ipynb +++ b/nbs/models.kan.ipynb @@ -437,9 +437,9 @@ " start_padding_enabled=start_padding_enabled,\n", " step_size=step_size,\n", " scaler_type=scaler_type,\n", + " num_workers_loader=num_workers_loader,\n", " drop_last_loader=drop_last_loader,\n", " random_seed=random_seed,\n", - " num_workers_loader=num_workers_loader,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", " dataloader_kwargs = dataloader_kwargs,\n", diff --git a/nbs/models.lstm.ipynb b/nbs/models.lstm.ipynb index 0813edb31..3eb469306 100644 --- a/nbs/models.lstm.ipynb +++ b/nbs/models.lstm.ipynb @@ -184,9 +184,9 @@ " futr_exog_list=futr_exog_list,\n", " hist_exog_list=hist_exog_list,\n", " stat_exog_list=stat_exog_list,\n", + " num_workers_loader=num_workers_loader,\n", " drop_last_loader=drop_last_loader,\n", " random_seed=random_seed,\n", - " num_workers_loader=num_workers_loader,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", " lr_scheduler=lr_scheduler,\n", diff --git a/nbs/models.mlp.ipynb b/nbs/models.mlp.ipynb index 17ac94977..46c09406f 100644 --- a/nbs/models.mlp.ipynb +++ b/nbs/models.mlp.ipynb @@ -181,9 +181,9 @@ " start_padding_enabled=start_padding_enabled,\n", " step_size=step_size,\n", " scaler_type=scaler_type,\n", + " num_workers_loader=num_workers_loader,\n", " drop_last_loader=drop_last_loader,\n", " random_seed=random_seed,\n", - " num_workers_loader=num_workers_loader,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", " lr_scheduler=lr_scheduler,\n", diff --git a/nbs/models.mlpmultivariate.ipynb b/nbs/models.mlpmultivariate.ipynb index 4704119c3..71abdfb04 100644 --- a/nbs/models.mlpmultivariate.ipynb +++ b/nbs/models.mlpmultivariate.ipynb @@ -167,9 +167,9 @@ " batch_size=batch_size,\n", " step_size=step_size,\n", " scaler_type=scaler_type,\n", + " num_workers_loader=num_workers_loader,\n", " drop_last_loader=drop_last_loader,\n", " random_seed=random_seed,\n", - " 
num_workers_loader=num_workers_loader,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", " lr_scheduler=lr_scheduler,\n", diff --git a/nbs/models.nbeats.ipynb b/nbs/models.nbeats.ipynb index 8a523b6ac..9504770d5 100644 --- a/nbs/models.nbeats.ipynb +++ b/nbs/models.nbeats.ipynb @@ -345,9 +345,9 @@ " start_padding_enabled=start_padding_enabled,\n", " step_size=step_size,\n", " scaler_type=scaler_type,\n", + " num_workers_loader=num_workers_loader,\n", " drop_last_loader=drop_last_loader,\n", " random_seed=random_seed,\n", - " num_workers_loader=num_workers_loader,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", " lr_scheduler=lr_scheduler,\n", diff --git a/nbs/models.nbeatsx.ipynb b/nbs/models.nbeatsx.ipynb index 91c85d156..9952c3cf9 100644 --- a/nbs/models.nbeatsx.ipynb +++ b/nbs/models.nbeatsx.ipynb @@ -499,9 +499,9 @@ " start_padding_enabled=start_padding_enabled,\n", " step_size = step_size,\n", " scaler_type=scaler_type,\n", + " num_workers_loader=num_workers_loader,\n", " drop_last_loader=drop_last_loader,\n", " random_seed=random_seed,\n", - " num_workers_loader=num_workers_loader,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", " lr_scheduler=lr_scheduler,\n", diff --git a/nbs/models.nhits.ipynb b/nbs/models.nhits.ipynb index 0ca4babf7..e844f4660 100644 --- a/nbs/models.nhits.ipynb +++ b/nbs/models.nhits.ipynb @@ -352,7 +352,7 @@ " step_size: int = 1,\n", " scaler_type: str = 'identity',\n", " random_seed: int = 1,\n", - " num_workers_loader: int = 0,\n", + " num_workers_loader = 0,\n", " drop_last_loader = False,\n", " optimizer = None,\n", " optimizer_kwargs = None,\n", @@ -382,9 +382,9 @@ " start_padding_enabled=start_padding_enabled,\n", " step_size=step_size,\n", " scaler_type=scaler_type,\n", + " num_workers_loader=num_workers_loader,\n", " drop_last_loader=drop_last_loader,\n", " random_seed=random_seed,\n", - " num_workers_loader=num_workers_loader,\n", " optimizer=optimizer,\n", " 
optimizer_kwargs=optimizer_kwargs,\n", " lr_scheduler=lr_scheduler,\n", diff --git a/nbs/models.nlinear.ipynb b/nbs/models.nlinear.ipynb index 6695a3f1a..b55d42204 100644 --- a/nbs/models.nlinear.ipynb +++ b/nbs/models.nlinear.ipynb @@ -168,9 +168,9 @@ " start_padding_enabled = start_padding_enabled,\n", " step_size=step_size,\n", " scaler_type=scaler_type,\n", + " num_workers_loader=num_workers_loader,\n", " drop_last_loader=drop_last_loader,\n", " random_seed=random_seed,\n", - " num_workers_loader=num_workers_loader,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", " lr_scheduler=lr_scheduler,\n", diff --git a/nbs/models.patchtst.ipynb b/nbs/models.patchtst.ipynb index 2fec96889..31064cc24 100644 --- a/nbs/models.patchtst.ipynb +++ b/nbs/models.patchtst.ipynb @@ -745,9 +745,9 @@ " start_padding_enabled=start_padding_enabled,\n", " step_size=step_size,\n", " scaler_type=scaler_type,\n", + " num_workers_loader=num_workers_loader,\n", " drop_last_loader=drop_last_loader,\n", " random_seed=random_seed,\n", - " num_workers_loader=num_workers_loader,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", " lr_scheduler=lr_scheduler,\n", diff --git a/nbs/models.rmok.ipynb b/nbs/models.rmok.ipynb index 54a9d9d1d..017477c13 100644 --- a/nbs/models.rmok.ipynb +++ b/nbs/models.rmok.ipynb @@ -330,6 +330,7 @@ "outputs": [], "source": [ "#| export\n", + "\n", "class RMoK(BaseMultivariate):\n", " \"\"\" Reversible Mixture of KAN\n", " **Parameters**
\n", diff --git a/nbs/models.rnn.ipynb b/nbs/models.rnn.ipynb index b05173f43..f5e1a67b9 100644 --- a/nbs/models.rnn.ipynb +++ b/nbs/models.rnn.ipynb @@ -191,9 +191,9 @@ " futr_exog_list=futr_exog_list,\n", " hist_exog_list=hist_exog_list,\n", " stat_exog_list=stat_exog_list,\n", + " num_workers_loader=num_workers_loader,\n", " drop_last_loader=drop_last_loader,\n", " random_seed=random_seed,\n", - " num_workers_loader=num_workers_loader,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", " lr_scheduler=lr_scheduler,\n", diff --git a/nbs/models.softs.ipynb b/nbs/models.softs.ipynb index c6eacd06c..978f3c2c2 100644 --- a/nbs/models.softs.ipynb +++ b/nbs/models.softs.ipynb @@ -168,6 +168,7 @@ "outputs": [], "source": [ "#| export\n", + "\n", "class SOFTS(BaseMultivariate):\n", "\n", " \"\"\" SOFTS\n", diff --git a/nbs/models.stemgnn.ipynb b/nbs/models.stemgnn.ipynb index d7b118d52..b2222fc1c 100644 --- a/nbs/models.stemgnn.ipynb +++ b/nbs/models.stemgnn.ipynb @@ -239,7 +239,7 @@ " step_size: int = 1,\n", " scaler_type: str = 'robust',\n", " random_seed: int = 1,\n", - " num_workers_loader: int = 0,\n", + " num_workers_loader = 0,\n", " drop_last_loader = False,\n", " optimizer = None,\n", " optimizer_kwargs = None,\n", @@ -265,9 +265,9 @@ " batch_size=batch_size,\n", " step_size=step_size,\n", " scaler_type=scaler_type,\n", + " num_workers_loader=num_workers_loader,\n", " drop_last_loader=drop_last_loader,\n", " random_seed=random_seed,\n", - " num_workers_loader=num_workers_loader,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", " lr_scheduler=lr_scheduler,\n", diff --git a/nbs/models.tcn.ipynb b/nbs/models.tcn.ipynb index 828d76b91..dee324513 100644 --- a/nbs/models.tcn.ipynb +++ b/nbs/models.tcn.ipynb @@ -164,7 +164,7 @@ " valid_batch_size: Optional[int] = None,\n", " scaler_type: str ='robust',\n", " random_seed: int = 1,\n", - " num_workers_loader: int = 0,\n", + " num_workers_loader = 0,\n", " drop_last_loader = 
False,\n", " optimizer = None,\n", " optimizer_kwargs = None,\n", @@ -189,9 +189,9 @@ " futr_exog_list=futr_exog_list,\n", " hist_exog_list=hist_exog_list,\n", " stat_exog_list=stat_exog_list,\n", + " num_workers_loader=num_workers_loader,\n", " drop_last_loader=drop_last_loader,\n", " random_seed=random_seed,\n", - " num_workers_loader=num_workers_loader,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", " lr_scheduler=lr_scheduler,\n", diff --git a/nbs/models.tft.ipynb b/nbs/models.tft.ipynb index 1468ef156..bae287acf 100644 --- a/nbs/models.tft.ipynb +++ b/nbs/models.tft.ipynb @@ -741,9 +741,9 @@ " start_padding_enabled=False,\n", " step_size: int = 1,\n", " scaler_type: str = \"robust\",\n", + " num_workers_loader=0,\n", " drop_last_loader=False,\n", " random_seed: int = 1,\n", - " num_workers_loader: int = 0,\n", " optimizer=None,\n", " optimizer_kwargs=None,\n", " lr_scheduler=None,\n", @@ -773,9 +773,9 @@ " start_padding_enabled=start_padding_enabled,\n", " step_size=step_size,\n", " scaler_type=scaler_type,\n", + " num_workers_loader=num_workers_loader,\n", " drop_last_loader=drop_last_loader,\n", " random_seed=random_seed,\n", - " num_workers_loader=num_workers_loader,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", " lr_scheduler=lr_scheduler,\n", diff --git a/nbs/models.timellm.ipynb b/nbs/models.timellm.ipynb index 05fc571c1..67f4a03d1 100755 --- a/nbs/models.timellm.ipynb +++ b/nbs/models.timellm.ipynb @@ -345,9 +345,9 @@ " num_lr_decays: int = 0,\n", " early_stop_patience_steps: int = -1,\n", " scaler_type: str = 'identity',\n", + " num_workers_loader: int = 0,\n", " drop_last_loader: bool = False,\n", " random_seed: int = 1,\n", - " num_workers_loader: int = 0,\n", " optimizer = None,\n", " optimizer_kwargs = None,\n", " lr_scheduler = None,\n", @@ -373,9 +373,9 @@ " start_padding_enabled=start_padding_enabled,\n", " step_size=step_size,\n", " scaler_type=scaler_type,\n", + " 
num_workers_loader=num_workers_loader,\n", " drop_last_loader=drop_last_loader,\n", " random_seed=random_seed,\n", - " num_workers_loader=num_workers_loader,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", " lr_scheduler=lr_scheduler,\n", diff --git a/nbs/models.timesnet.ipynb b/nbs/models.timesnet.ipynb index 751afb1e7..37e5d46e4 100644 --- a/nbs/models.timesnet.ipynb +++ b/nbs/models.timesnet.ipynb @@ -259,11 +259,11 @@ " Type of scaler for temporal inputs normalization see [temporal scalers](https://nixtla.github.io/neuralforecast/common.scalers.html).
\n", " random_seed : int (default=1)\n", " Random_seed for pytorch initializer and numpy generators.\n", - " num_workers_loader : int (default=0) \n", + " num_workers_loader : int (default=0)\n", " Workers to be used by `TimeSeriesDataLoader`.\n", " drop_last_loader : bool (default=False)\n", " If True `TimeSeriesDataLoader` drops last non-full batch.\n", - " `optimizer`: Subclass of 'torch.optim.Optimizer', optionval (default=None)\n", + " `optimizer`: Subclass of 'torch.optim.Optimizer', optional (default=None)\n", " User specified optimizer instead of the default choice (Adam).\n", " `optimizer_kwargs`: dict, optional (defualt=None)\n", " List of parameters used by the user specified `optimizer`.\n", @@ -340,9 +340,9 @@ " start_padding_enabled = start_padding_enabled,\n", " step_size=step_size,\n", " scaler_type=scaler_type,\n", + " num_workers_loader=num_workers_loader,\n", " drop_last_loader=drop_last_loader,\n", " random_seed=random_seed,\n", - " num_workers_loader=num_workers_loader,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", " lr_scheduler=lr_scheduler,\n", diff --git a/nbs/models.vanillatransformer.ipynb b/nbs/models.vanillatransformer.ipynb index eb0fb7d2a..b76cc9ba2 100644 --- a/nbs/models.vanillatransformer.ipynb +++ b/nbs/models.vanillatransformer.ipynb @@ -270,9 +270,9 @@ " start_padding_enabled=start_padding_enabled,\n", " step_size=step_size,\n", " scaler_type=scaler_type,\n", + " num_workers_loader=num_workers_loader,\n", " drop_last_loader=drop_last_loader,\n", " random_seed=random_seed,\n", - " num_workers_loader=num_workers_loader,\n", " optimizer=optimizer,\n", " optimizer_kwargs=optimizer_kwargs,\n", " lr_scheduler=lr_scheduler,\n", diff --git a/neuralforecast/models/autoformer.py b/neuralforecast/models/autoformer.py index 6f74ecc67..815e57bc2 100644 --- a/neuralforecast/models/autoformer.py +++ b/neuralforecast/models/autoformer.py @@ -522,9 +522,9 @@ def __init__( start_padding_enabled=start_padding_enabled, 
step_size=step_size, scaler_type=scaler_type, + num_workers_loader=num_workers_loader, drop_last_loader=drop_last_loader, random_seed=random_seed, - num_workers_loader=num_workers_loader, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, lr_scheduler=lr_scheduler, diff --git a/neuralforecast/models/deepar.py b/neuralforecast/models/deepar.py index fa54fe5ad..3d2a2fd94 100644 --- a/neuralforecast/models/deepar.py +++ b/neuralforecast/models/deepar.py @@ -137,7 +137,7 @@ def __init__( step_size: int = 1, scaler_type: str = "identity", random_seed: int = 1, - num_workers_loader: int = 0, + num_workers_loader=0, drop_last_loader=False, optimizer=None, optimizer_kwargs=None, @@ -185,9 +185,9 @@ def __init__( start_padding_enabled=start_padding_enabled, step_size=step_size, scaler_type=scaler_type, + num_workers_loader=num_workers_loader, drop_last_loader=drop_last_loader, random_seed=random_seed, - num_workers_loader=num_workers_loader, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, lr_scheduler=lr_scheduler, diff --git a/neuralforecast/models/deepnpts.py b/neuralforecast/models/deepnpts.py index 621446595..f958e71be 100644 --- a/neuralforecast/models/deepnpts.py +++ b/neuralforecast/models/deepnpts.py @@ -94,7 +94,7 @@ def __init__( step_size: int = 1, scaler_type: str = "standard", random_seed: int = 1, - num_workers_loader: int = 0, + num_workers_loader=0, drop_last_loader=False, optimizer=None, optimizer_kwargs=None, @@ -139,9 +139,9 @@ def __init__( start_padding_enabled=start_padding_enabled, step_size=step_size, scaler_type=scaler_type, + num_workers_loader=num_workers_loader, drop_last_loader=drop_last_loader, random_seed=random_seed, - num_workers_loader=num_workers_loader, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, lr_scheduler=lr_scheduler, diff --git a/neuralforecast/models/dilated_rnn.py b/neuralforecast/models/dilated_rnn.py index 52722f96c..d56cc5f08 100644 --- a/neuralforecast/models/dilated_rnn.py +++ 
b/neuralforecast/models/dilated_rnn.py @@ -383,9 +383,9 @@ def __init__( futr_exog_list=futr_exog_list, hist_exog_list=hist_exog_list, stat_exog_list=stat_exog_list, + num_workers_loader=num_workers_loader, drop_last_loader=drop_last_loader, random_seed=random_seed, - num_workers_loader=num_workers_loader, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, lr_scheduler=lr_scheduler, diff --git a/neuralforecast/models/dlinear.py b/neuralforecast/models/dlinear.py index cdb080fbe..17965c869 100644 --- a/neuralforecast/models/dlinear.py +++ b/neuralforecast/models/dlinear.py @@ -146,9 +146,9 @@ def __init__( start_padding_enabled=start_padding_enabled, step_size=step_size, scaler_type=scaler_type, + num_workers_loader=num_workers_loader, drop_last_loader=drop_last_loader, random_seed=random_seed, - num_workers_loader=num_workers_loader, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, lr_scheduler=lr_scheduler, diff --git a/neuralforecast/models/fedformer.py b/neuralforecast/models/fedformer.py index 1692fa953..89e2fe3ef 100644 --- a/neuralforecast/models/fedformer.py +++ b/neuralforecast/models/fedformer.py @@ -518,9 +518,9 @@ def __init__( start_padding_enabled=start_padding_enabled, step_size=step_size, scaler_type=scaler_type, + num_workers_loader=num_workers_loader, drop_last_loader=drop_last_loader, random_seed=random_seed, - num_workers_loader=num_workers_loader, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, lr_scheduler=lr_scheduler, diff --git a/neuralforecast/models/gru.py b/neuralforecast/models/gru.py index 795eeb1b9..900eac162 100644 --- a/neuralforecast/models/gru.py +++ b/neuralforecast/models/gru.py @@ -118,9 +118,9 @@ def __init__( futr_exog_list=futr_exog_list, hist_exog_list=hist_exog_list, stat_exog_list=stat_exog_list, + num_workers_loader=num_workers_loader, drop_last_loader=drop_last_loader, random_seed=random_seed, - num_workers_loader=num_workers_loader, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, 
lr_scheduler=lr_scheduler, diff --git a/neuralforecast/models/informer.py b/neuralforecast/models/informer.py index ff917d19b..8b115cebd 100644 --- a/neuralforecast/models/informer.py +++ b/neuralforecast/models/informer.py @@ -306,9 +306,9 @@ def __init__( start_padding_enabled=start_padding_enabled, step_size=step_size, scaler_type=scaler_type, + num_workers_loader=num_workers_loader, drop_last_loader=drop_last_loader, random_seed=random_seed, - num_workers_loader=num_workers_loader, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, lr_scheduler=lr_scheduler, diff --git a/neuralforecast/models/kan.py b/neuralforecast/models/kan.py index 035262555..29d7b1d00 100644 --- a/neuralforecast/models/kan.py +++ b/neuralforecast/models/kan.py @@ -362,9 +362,9 @@ def __init__( start_padding_enabled=start_padding_enabled, step_size=step_size, scaler_type=scaler_type, + num_workers_loader=num_workers_loader, drop_last_loader=drop_last_loader, random_seed=random_seed, - num_workers_loader=num_workers_loader, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, dataloader_kwargs=dataloader_kwargs, diff --git a/neuralforecast/models/lstm.py b/neuralforecast/models/lstm.py index 976c30c22..e89db3628 100644 --- a/neuralforecast/models/lstm.py +++ b/neuralforecast/models/lstm.py @@ -116,9 +116,9 @@ def __init__( futr_exog_list=futr_exog_list, hist_exog_list=hist_exog_list, stat_exog_list=stat_exog_list, + num_workers_loader=num_workers_loader, drop_last_loader=drop_last_loader, random_seed=random_seed, - num_workers_loader=num_workers_loader, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, lr_scheduler=lr_scheduler, diff --git a/neuralforecast/models/mlp.py b/neuralforecast/models/mlp.py index c49cdf4d6..0794ac7c3 100644 --- a/neuralforecast/models/mlp.py +++ b/neuralforecast/models/mlp.py @@ -120,9 +120,9 @@ def __init__( start_padding_enabled=start_padding_enabled, step_size=step_size, scaler_type=scaler_type, + num_workers_loader=num_workers_loader, 
drop_last_loader=drop_last_loader, random_seed=random_seed, - num_workers_loader=num_workers_loader, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, lr_scheduler=lr_scheduler, diff --git a/neuralforecast/models/mlpmultivariate.py b/neuralforecast/models/mlpmultivariate.py index 2e3b3a271..7554bb44d 100644 --- a/neuralforecast/models/mlpmultivariate.py +++ b/neuralforecast/models/mlpmultivariate.py @@ -106,9 +106,9 @@ def __init__( batch_size=batch_size, step_size=step_size, scaler_type=scaler_type, + num_workers_loader=num_workers_loader, drop_last_loader=drop_last_loader, random_seed=random_seed, - num_workers_loader=num_workers_loader, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, lr_scheduler=lr_scheduler, diff --git a/neuralforecast/models/nbeats.py b/neuralforecast/models/nbeats.py index c2f35d8fb..02280fb79 100644 --- a/neuralforecast/models/nbeats.py +++ b/neuralforecast/models/nbeats.py @@ -307,9 +307,9 @@ def __init__( start_padding_enabled=start_padding_enabled, step_size=step_size, scaler_type=scaler_type, + num_workers_loader=num_workers_loader, drop_last_loader=drop_last_loader, random_seed=random_seed, - num_workers_loader=num_workers_loader, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, lr_scheduler=lr_scheduler, diff --git a/neuralforecast/models/nbeatsx.py b/neuralforecast/models/nbeatsx.py index b653ad22a..811392a66 100644 --- a/neuralforecast/models/nbeatsx.py +++ b/neuralforecast/models/nbeatsx.py @@ -401,9 +401,9 @@ def __init__( start_padding_enabled=start_padding_enabled, step_size=step_size, scaler_type=scaler_type, + num_workers_loader=num_workers_loader, drop_last_loader=drop_last_loader, random_seed=random_seed, - num_workers_loader=num_workers_loader, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, lr_scheduler=lr_scheduler, diff --git a/neuralforecast/models/nhits.py b/neuralforecast/models/nhits.py index e4ea36cc8..ce5caeaaa 100644 --- a/neuralforecast/models/nhits.py +++ 
b/neuralforecast/models/nhits.py @@ -277,7 +277,7 @@ def __init__( step_size: int = 1, scaler_type: str = "identity", random_seed: int = 1, - num_workers_loader: int = 0, + num_workers_loader=0, drop_last_loader=False, optimizer=None, optimizer_kwargs=None, @@ -309,9 +309,9 @@ def __init__( start_padding_enabled=start_padding_enabled, step_size=step_size, scaler_type=scaler_type, + num_workers_loader=num_workers_loader, drop_last_loader=drop_last_loader, random_seed=random_seed, - num_workers_loader=num_workers_loader, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, lr_scheduler=lr_scheduler, diff --git a/neuralforecast/models/nlinear.py b/neuralforecast/models/nlinear.py index ce436add8..4909ddbd3 100644 --- a/neuralforecast/models/nlinear.py +++ b/neuralforecast/models/nlinear.py @@ -109,9 +109,9 @@ def __init__( start_padding_enabled=start_padding_enabled, step_size=step_size, scaler_type=scaler_type, + num_workers_loader=num_workers_loader, drop_last_loader=drop_last_loader, random_seed=random_seed, - num_workers_loader=num_workers_loader, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, lr_scheduler=lr_scheduler, diff --git a/neuralforecast/models/patchtst.py b/neuralforecast/models/patchtst.py index a9ce9271f..0b2029fd4 100644 --- a/neuralforecast/models/patchtst.py +++ b/neuralforecast/models/patchtst.py @@ -923,9 +923,9 @@ def __init__( start_padding_enabled=start_padding_enabled, step_size=step_size, scaler_type=scaler_type, + num_workers_loader=num_workers_loader, drop_last_loader=drop_last_loader, random_seed=random_seed, - num_workers_loader=num_workers_loader, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, lr_scheduler=lr_scheduler, diff --git a/neuralforecast/models/rnn.py b/neuralforecast/models/rnn.py index a2b83cc9b..f5d60f42a 100644 --- a/neuralforecast/models/rnn.py +++ b/neuralforecast/models/rnn.py @@ -119,9 +119,9 @@ def __init__( futr_exog_list=futr_exog_list, hist_exog_list=hist_exog_list, 
stat_exog_list=stat_exog_list, + num_workers_loader=num_workers_loader, drop_last_loader=drop_last_loader, random_seed=random_seed, - num_workers_loader=num_workers_loader, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, lr_scheduler=lr_scheduler, diff --git a/neuralforecast/models/stemgnn.py b/neuralforecast/models/stemgnn.py index 228618d9e..85a014e65 100644 --- a/neuralforecast/models/stemgnn.py +++ b/neuralforecast/models/stemgnn.py @@ -206,7 +206,7 @@ def __init__( step_size: int = 1, scaler_type: str = "robust", random_seed: int = 1, - num_workers_loader: int = 0, + num_workers_loader=0, drop_last_loader=False, optimizer=None, optimizer_kwargs=None, @@ -234,9 +234,9 @@ def __init__( batch_size=batch_size, step_size=step_size, scaler_type=scaler_type, + num_workers_loader=num_workers_loader, drop_last_loader=drop_last_loader, random_seed=random_seed, - num_workers_loader=num_workers_loader, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, lr_scheduler=lr_scheduler, diff --git a/neuralforecast/models/tcn.py b/neuralforecast/models/tcn.py index ded621b62..fd900512c 100644 --- a/neuralforecast/models/tcn.py +++ b/neuralforecast/models/tcn.py @@ -87,7 +87,7 @@ def __init__( valid_batch_size: Optional[int] = None, scaler_type: str = "robust", random_seed: int = 1, - num_workers_loader: int = 0, + num_workers_loader=0, drop_last_loader=False, optimizer=None, optimizer_kwargs=None, @@ -113,9 +113,9 @@ def __init__( futr_exog_list=futr_exog_list, hist_exog_list=hist_exog_list, stat_exog_list=stat_exog_list, + num_workers_loader=num_workers_loader, drop_last_loader=drop_last_loader, random_seed=random_seed, - num_workers_loader=num_workers_loader, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, lr_scheduler=lr_scheduler, diff --git a/neuralforecast/models/tft.py b/neuralforecast/models/tft.py index 6c17c2aa6..f96d5646b 100644 --- a/neuralforecast/models/tft.py +++ b/neuralforecast/models/tft.py @@ -502,9 +502,9 @@ def __init__( 
start_padding_enabled=False, step_size: int = 1, scaler_type: str = "robust", + num_workers_loader=0, drop_last_loader=False, random_seed: int = 1, - num_workers_loader: int = 0, optimizer=None, optimizer_kwargs=None, lr_scheduler=None, @@ -534,9 +534,9 @@ def __init__( start_padding_enabled=start_padding_enabled, step_size=step_size, scaler_type=scaler_type, + num_workers_loader=num_workers_loader, drop_last_loader=drop_last_loader, random_seed=random_seed, - num_workers_loader=num_workers_loader, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, lr_scheduler=lr_scheduler, diff --git a/neuralforecast/models/timellm.py b/neuralforecast/models/timellm.py index c0ef750c8..aa9276f72 100644 --- a/neuralforecast/models/timellm.py +++ b/neuralforecast/models/timellm.py @@ -269,9 +269,9 @@ def __init__( num_lr_decays: int = 0, early_stop_patience_steps: int = -1, scaler_type: str = "identity", + num_workers_loader: int = 0, drop_last_loader: bool = False, random_seed: int = 1, - num_workers_loader: int = 0, optimizer=None, optimizer_kwargs=None, lr_scheduler=None, @@ -299,9 +299,9 @@ def __init__( start_padding_enabled=start_padding_enabled, step_size=step_size, scaler_type=scaler_type, + num_workers_loader=num_workers_loader, drop_last_loader=drop_last_loader, random_seed=random_seed, - num_workers_loader=num_workers_loader, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, lr_scheduler=lr_scheduler, diff --git a/neuralforecast/models/timesnet.py b/neuralforecast/models/timesnet.py index cd04a79da..aab548382 100644 --- a/neuralforecast/models/timesnet.py +++ b/neuralforecast/models/timesnet.py @@ -182,7 +182,7 @@ class TimesNet(BaseWindows): Workers to be used by `TimeSeriesDataLoader`. drop_last_loader : bool (default=False) If True `TimeSeriesDataLoader` drops last non-full batch. 
- `optimizer`: Subclass of 'torch.optim.Optimizer', optionval (default=None) + `optimizer`: Subclass of 'torch.optim.Optimizer', optional (default=None) User specified optimizer instead of the default choice (Adam). `optimizer_kwargs`: dict, optional (defualt=None) List of parameters used by the user specified `optimizer`. @@ -263,9 +263,9 @@ def __init__( start_padding_enabled=start_padding_enabled, step_size=step_size, scaler_type=scaler_type, + num_workers_loader=num_workers_loader, drop_last_loader=drop_last_loader, random_seed=random_seed, - num_workers_loader=num_workers_loader, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, lr_scheduler=lr_scheduler, diff --git a/neuralforecast/models/vanillatransformer.py b/neuralforecast/models/vanillatransformer.py index b9d987a17..69fcc9c4d 100644 --- a/neuralforecast/models/vanillatransformer.py +++ b/neuralforecast/models/vanillatransformer.py @@ -193,9 +193,9 @@ def __init__( start_padding_enabled=start_padding_enabled, step_size=step_size, scaler_type=scaler_type, + num_workers_loader=num_workers_loader, drop_last_loader=drop_last_loader, random_seed=random_seed, - num_workers_loader=num_workers_loader, optimizer=optimizer, optimizer_kwargs=optimizer_kwargs, lr_scheduler=lr_scheduler, From edf1ecb2f4b974a260c8f642dff8535f1a1237ca Mon Sep 17 00:00:00 2001 From: jasminerienecker Date: Thu, 31 Oct 2024 14:41:47 +1100 Subject: [PATCH 09/11] fix depracation clause --- nbs/common.base_model.ipynb | 2 +- nbs/common.base_recurrent.ipynb | 2 +- neuralforecast/common/_base_model.py | 2 +- neuralforecast/common/_base_recurrent.py | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/nbs/common.base_model.ipynb b/nbs/common.base_model.ipynb index fabd78bbf..2ae169f8f 100644 --- a/nbs/common.base_model.ipynb +++ b/nbs/common.base_model.ipynb @@ -368,7 +368,7 @@ " f\"`dataloader_kwargs={{'num_workers': {self.num_workers_loader}}}`\",\n", " category=FutureWarning,\n", " )\n", - " 
dataloader_kwargs['num_workers'] = self.num_workers_loader\n", + " dataloader_kwargs['num_workers'] = self.num_workers_loader\n", "\n", " datamodule = datamodule_constructor(\n", " dataset=dataset, \n", diff --git a/nbs/common.base_recurrent.ipynb b/nbs/common.base_recurrent.ipynb index d4091185e..db89dec78 100644 --- a/nbs/common.base_recurrent.ipynb +++ b/nbs/common.base_recurrent.ipynb @@ -550,7 +550,7 @@ " f\"`dataloader_kwargs={{'num_workers': {self.num_workers_loader}}}`\",\n", " category=FutureWarning,\n", " ) \n", - " data_module_kwargs['num_workers'] = self.num_workers_loader\n", + " data_module_kwargs['num_workers'] = self.num_workers_loader\n", " \n", " if step_size > 1:\n", " raise Exception('Recurrent models do not support step_size > 1')\n", diff --git a/neuralforecast/common/_base_model.py b/neuralforecast/common/_base_model.py index 0923c7d1a..606ee8f0e 100644 --- a/neuralforecast/common/_base_model.py +++ b/neuralforecast/common/_base_model.py @@ -344,7 +344,7 @@ def _fit( f"`dataloader_kwargs={{'num_workers': {self.num_workers_loader}}}`", category=FutureWarning, ) - dataloader_kwargs["num_workers"] = self.num_workers_loader + dataloader_kwargs["num_workers"] = self.num_workers_loader datamodule = datamodule_constructor( dataset=dataset, diff --git a/neuralforecast/common/_base_recurrent.py b/neuralforecast/common/_base_recurrent.py index 1ec0322e4..7534ef281 100644 --- a/neuralforecast/common/_base_recurrent.py +++ b/neuralforecast/common/_base_recurrent.py @@ -574,7 +574,7 @@ def predict(self, dataset, step_size=1, random_seed=None, **data_module_kwargs): f"`dataloader_kwargs={{'num_workers': {self.num_workers_loader}}}`", category=FutureWarning, ) - data_module_kwargs["num_workers"] = self.num_workers_loader + data_module_kwargs["num_workers"] = self.num_workers_loader if step_size > 1: raise Exception("Recurrent models do not support step_size > 1") From 394ee619faa26d2e8721111391d9ecf1b0c5b1a6 Mon Sep 17 00:00:00 2001 From: jasminerienecker 
Date: Mon, 4 Nov 2024 09:51:02 +1100 Subject: [PATCH 10/11] revert changes to predict method --- nbs/common.base_recurrent.ipynb | 13 ------------- neuralforecast/common/_base_recurrent.py | 17 ----------------- 2 files changed, 30 deletions(-) diff --git a/nbs/common.base_recurrent.ipynb b/nbs/common.base_recurrent.ipynb index db89dec78..233f83ff4 100644 --- a/nbs/common.base_recurrent.ipynb +++ b/nbs/common.base_recurrent.ipynb @@ -79,9 +79,6 @@ "outputs": [], "source": [ "#| export\n", - "import warnings\n", - "\n", - "\n", "class BaseRecurrent(BaseModel):\n", " \"\"\" Base Recurrent\n", " \n", @@ -541,16 +538,6 @@ " self._check_exog(dataset)\n", " self._restart_seed(random_seed)\n", " data_module_kwargs = self._set_quantile_for_iqloss(**data_module_kwargs)\n", - " data_module_kwargs = self.dataloader_kwargs.update(data_module_kwargs) if self.dataloader_kwargs is not None else data_module_kwargs\n", - "\n", - " if self.num_workers_loader != 0: # value is not at its default\n", - " warnings.warn(\n", - " \"The `num_workers_loader` argument is deprecated and will be removed in a future version. \"\n", - " \"Please provide num_workers through `dataloader_kwargs`, e.g. 
\"\n", - " f\"`dataloader_kwargs={{'num_workers': {self.num_workers_loader}}}`\",\n", - " category=FutureWarning,\n", - " ) \n", - " data_module_kwargs['num_workers'] = self.num_workers_loader\n", " \n", " if step_size > 1:\n", " raise Exception('Recurrent models do not support step_size > 1')\n", diff --git a/neuralforecast/common/_base_recurrent.py b/neuralforecast/common/_base_recurrent.py index 7534ef281..ac592a95b 100644 --- a/neuralforecast/common/_base_recurrent.py +++ b/neuralforecast/common/_base_recurrent.py @@ -16,9 +16,6 @@ from ..utils import get_indexer_raise_missing # %% ../../nbs/common.base_recurrent.ipynb 7 -import warnings - - class BaseRecurrent(BaseModel): """Base Recurrent @@ -561,20 +558,6 @@ def predict(self, dataset, step_size=1, random_seed=None, **data_module_kwargs): self._check_exog(dataset) self._restart_seed(random_seed) data_module_kwargs = self._set_quantile_for_iqloss(**data_module_kwargs) - data_module_kwargs = ( - self.dataloader_kwargs.update(data_module_kwargs) - if self.dataloader_kwargs is not None - else data_module_kwargs - ) - - if self.num_workers_loader != 0: # value is not at its default - warnings.warn( - "The `num_workers_loader` argument is deprecated and will be removed in a future version. " - "Please provide num_workers through `dataloader_kwargs`, e.g. 
" - f"`dataloader_kwargs={{'num_workers': {self.num_workers_loader}}}`", - category=FutureWarning, - ) - data_module_kwargs["num_workers"] = self.num_workers_loader if step_size > 1: raise Exception("Recurrent models do not support step_size > 1") From 0ab38390cbadda41fccbf406c735e3c76c3613a0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jos=C3=A9=20Morales?= Date: Fri, 8 Nov 2024 10:18:46 -0600 Subject: [PATCH 11/11] restore num_workers in predict --- nbs/common.base_recurrent.ipynb | 1 + neuralforecast/common/_base_recurrent.py | 1 + 2 files changed, 2 insertions(+) diff --git a/nbs/common.base_recurrent.ipynb b/nbs/common.base_recurrent.ipynb index 233f83ff4..7b0ed5585 100644 --- a/nbs/common.base_recurrent.ipynb +++ b/nbs/common.base_recurrent.ipynb @@ -553,6 +553,7 @@ " datamodule = TimeSeriesDataModule(\n", " dataset=dataset,\n", " valid_batch_size=self.valid_batch_size,\n", + " num_workers=self.num_workers_loader,\n", " **data_module_kwargs\n", " )\n", " fcsts = trainer.predict(self, datamodule=datamodule)\n", diff --git a/neuralforecast/common/_base_recurrent.py b/neuralforecast/common/_base_recurrent.py index ac592a95b..0479996c1 100644 --- a/neuralforecast/common/_base_recurrent.py +++ b/neuralforecast/common/_base_recurrent.py @@ -575,6 +575,7 @@ def predict(self, dataset, step_size=1, random_seed=None, **data_module_kwargs): datamodule = TimeSeriesDataModule( dataset=dataset, valid_batch_size=self.valid_batch_size, + num_workers=self.num_workers_loader, **data_module_kwargs, ) fcsts = trainer.predict(self, datamodule=datamodule)