Commit 6c72c75

Add params_to_tune for DeepARModel and TFTModel (#1210)

Mr-Geekman authored Apr 7, 2023
1 parent db8cd3a commit 6c72c75
Showing 5 changed files with 72 additions and 4 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.md
@@ -48,6 +48,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
- Add default `params_to_tune` for `SARIMAXModel`, change default parameters for the model ([#1206](https://github.com/tinkoff-ai/etna/pull/1206))
- Add default `params_to_tune` for linear models ([#1204](https://github.com/tinkoff-ai/etna/pull/1204))
- Add default `params_to_tune` for `SeasonalMovingAverageModel`, `MovingAverageModel`, `NaiveModel` and `DeadlineMovingAverageModel` ([#1208](https://github.com/tinkoff-ai/etna/pull/1208))
- Add default `params_to_tune` for `DeepARModel` and `TFTModel` ([#1210](https://github.com/tinkoff-ai/etna/pull/1210))
### Fixed
- Fix bug in `GaleShapleyFeatureSelectionTransform` with wrong number of remaining features ([#1110](https://github.com/tinkoff-ai/etna/pull/1110))
- `ProphetModel` fails with additional seasonality set ([#1157](https://github.com/tinkoff-ai/etna/pull/1157))
24 changes: 22 additions & 2 deletions etna/models/nn/deepar.py
@@ -23,6 +23,12 @@
from pytorch_forecasting.models import DeepAR
from pytorch_lightning import LightningModule

if SETTINGS.auto_required:
    from optuna.distributions import BaseDistribution
    from optuna.distributions import IntUniformDistribution
    from optuna.distributions import LogUniformDistribution
    from optuna.distributions import UniformDistribution


class DeepARModel(_DeepCopyMixin, PytorchForecastingMixin, SaveNNMixin, PredictionIntervalContextRequiredAbstractModel):
"""Wrapper for :py:class:`pytorch_forecasting.models.deepar.DeepAR`.
@@ -240,12 +246,26 @@ def predict(
    def get_model(self) -> Any:
        """Get internal model that is used inside etna class.

        Internal model is a model that is used inside etna to forecast segments,
        e.g. :py:class:`catboost.CatBoostRegressor` or :py:class:`sklearn.linear_model.Ridge`.

        Model is the instance of :py:class:`pytorch_forecasting.models.deepar.DeepAR`.

        Returns
        -------
        :
            Internal model
        """
        return self.model

    def params_to_tune(self) -> Dict[str, "BaseDistribution"]:
        """Get default grid for tuning hyperparameters.

        Returns
        -------
        :
            Grid to tune.
        """
        return {
            "hidden_size": IntUniformDistribution(low=4, high=64, step=4),
            "rnn_layers": IntUniformDistribution(low=1, high=3, step=1),
            "dropout": UniformDistribution(low=0, high=0.5),
            "lr": LogUniformDistribution(low=1e-5, high=1e-2),
        }
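For reference, the new grid can be exercised by hand with optuna, mirroring this commit's tests: a sampler draws one concrete value per distribution and `set_params` applies the result. A minimal sketch, not part of the diff (the `decoder_length`/`encoder_length` values and the seed are arbitrary):

```python
from optuna.samplers import RandomSampler

from etna.models.nn import DeepARModel

model = DeepARModel(decoder_length=3, encoder_length=4)
sampler = RandomSampler(seed=0)

# Draw one concrete configuration from the default grid.
# RandomSampler ignores `study` and `trial`, so None is fine here,
# just as in this commit's tests.
params = {
    name: sampler.sample_independent(study=None, trial=None, param_name=name, param_distribution=dist)
    for name, dist in model.params_to_tune().items()
}

# set_params returns the updated model instance.
model = model.set_params(**params)
```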
25 changes: 23 additions & 2 deletions etna/models/nn/tft.py
@@ -23,6 +23,12 @@
from pytorch_forecasting.models import TemporalFusionTransformer
from pytorch_lightning import LightningModule

if SETTINGS.auto_required:
    from optuna.distributions import BaseDistribution
    from optuna.distributions import IntUniformDistribution
    from optuna.distributions import LogUniformDistribution
    from optuna.distributions import UniformDistribution


class TFTModel(_DeepCopyMixin, PytorchForecastingMixin, SaveNNMixin, PredictionIntervalContextRequiredAbstractModel):
"""Wrapper for :py:class:`pytorch_forecasting.models.temporal_fusion_transformer.TemporalFusionTransformer`.
@@ -269,12 +275,27 @@ def predict(
    def get_model(self) -> Any:
        """Get internal model that is used inside etna class.

        Internal model is a model that is used inside etna to forecast segments,
        e.g. :py:class:`catboost.CatBoostRegressor` or :py:class:`sklearn.linear_model.Ridge`.

        Model is the instance of :py:class:`pytorch_forecasting.models.temporal_fusion_transformer.TemporalFusionTransformer`.

        Returns
        -------
        :
            Internal model
        """
        return self.model

    def params_to_tune(self) -> Dict[str, "BaseDistribution"]:
        """Get default grid for tuning hyperparameters.

        Returns
        -------
        :
            Grid to tune.
        """
        return {
            "hidden_size": IntUniformDistribution(low=4, high=64, step=4),
            "lstm_layers": IntUniformDistribution(low=1, high=3, step=1),
            "dropout": UniformDistribution(low=0, high=0.5),
            "attention_head_size": IntUniformDistribution(low=2, high=8, step=2),
            "lr": LogUniformDistribution(low=1e-5, high=1e-2),
        }
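In practice these defaults are meant to be picked up by etna's tuning utilities rather than sampled by hand. A hypothetical sketch, assuming an `etna.auto.Tune` helper that falls back to the pipeline's `params_to_tune()` when no explicit grid is given (that helper and its exact signature are not part of this commit and may differ between versions):

```python
import pandas as pd

from etna.auto import Tune
from etna.datasets import TSDataset
from etna.metrics import MAE
from etna.models.nn import TFTModel
from etna.pipeline import Pipeline

# Tiny synthetic dataset so the sketch is self-contained.
df = pd.DataFrame(
    {
        "timestamp": pd.date_range("2021-01-01", periods=100, freq="D"),
        "segment": "segment_0",
        "target": range(100),
    }
)
ts = TSDataset(TSDataset.to_dataset(df), freq="D")

HORIZON = 3  # assumed forecast horizon for the sketch
pipeline = Pipeline(model=TFTModel(decoder_length=3, encoder_length=4), horizon=HORIZON)

# Assumption: with no explicit grid, Tune draws hyperparameters from
# pipeline.params_to_tune(), which now includes the grid added above.
tune = Tune(pipeline=pipeline, target_metric=MAE(), horizon=HORIZON)
tune.fit(ts=ts, n_trials=10)
```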
13 changes: 13 additions & 0 deletions tests/test_models/nn/test_deepar.py
@@ -2,6 +2,7 @@

import pandas as pd
import pytest
from optuna.samplers import RandomSampler
from pytorch_forecasting.data import GroupNormalizer

from etna.datasets.tsdataset import TSDataset
@@ -191,3 +192,15 @@ def test_repr():
def test_deepar_forecast_throw_error_on_return_components():
    with pytest.raises(NotImplementedError, match="This mode isn't currently implemented!"):
        DeepARModel.forecast(self=Mock(), ts=Mock(), prediction_size=Mock(), return_components=True)


def test_params_to_tune():
    model = DeepARModel(decoder_length=3, encoder_length=4)
    grid = model.params_to_tune()
    # we need a sampler to get a value from each distribution
    sampler = RandomSampler()

    assert len(grid) > 0
    for name, distribution in grid.items():
        value = sampler.sample_independent(study=None, trial=None, param_name=name, param_distribution=distribution)
        _ = model.set_params(**{name: value})
13 changes: 13 additions & 0 deletions tests/test_models/nn/test_tft.py
@@ -2,6 +2,7 @@

import pandas as pd
import pytest
from optuna.samplers import RandomSampler

from etna.metrics import MAE
from etna.models.nn import TFTModel
@@ -196,3 +197,15 @@ def test_repr():
def test_tft_forecast_throw_error_on_return_components():
    with pytest.raises(NotImplementedError, match="This mode isn't currently implemented!"):
        TFTModel.forecast(self=Mock(), ts=Mock(), prediction_size=Mock(), return_components=True)


def test_params_to_tune():
    model = TFTModel(decoder_length=3, encoder_length=4)
    grid = model.params_to_tune()
    # we need a sampler to get a value from each distribution
    sampler = RandomSampler()

    assert len(grid) > 0
    for name, distribution in grid.items():
        value = sampler.sample_independent(study=None, trial=None, param_name=name, param_distribution=distribution)
        _ = model.set_params(**{name: value})
