
openai api generated code for ETNA shortcuts #1309

Open
martins0n opened this issue Jul 6, 2023 · 0 comments
Labels
enhancement New feature or request

Comments

@martins0n
Contributor

🚀 Feature Request

As a newbie, you could just describe the desired features and model in a plain-language prompt, relying only on general knowledge, and get a proper pipeline or forecast back.
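For example, the intended interaction could look like the sketch below (hypothetical usage of the etnaGPT helper defined in the proposal; ts is assumed to be an existing TSDataset):

# Hypothetical end-to-end usage of the proposed helper.
pipeline = etnaGPT("gradient boosting with lag features and date flags", horizon=14)
pipeline.fit(ts)            # ts: an existing etna TSDataset
forecast = pipeline.forecast()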

Proposal


import os
import ast
import openai
from hydra_slayer import get_from_params

from etna.auto.pool.utils import fill_template

openai.api_key = os.getenv("OPENAI_API_KEY")


SYSTEM_CONTEXT = """

Our task is to generate a pipeline with a model and transforms for time series forecasting.
All models and transforms you may use are listed in the ```PROTOCOLS AND DESCRIPTIONS: ...``` section:

PROTOCOLS AND DESCRIPTIONS:
```
class PerSegmentModelMixin(ModelForecastingMixin):def __init__(self, base_model: Any):
class MultiSegmentModelMixin(ModelForecastingMixin):def __init__(self, base_model: Any):
class DeadlineMovingAverageModel(NonPredictionIntervalContextRequiredAbstractModel,):def __init__(self, window: int = 3, seasonality: str = "month"):
class SeasonalMovingAverageModel(NonPredictionIntervalContextRequiredAbstractModel,):def __init__(self, window: int = 5, seasonality: int = 7):
class SimpleExpSmoothingModel(PerSegmentModelMixin,NonPredictionIntervalContextIgnorantModelMixin,NonPredictionIntervalContextIgnorantAbstractModel,):def __init__(self,initialization_method: str = "estimated",initial_level: Optional[float] = None,smoothing_level: Optional[float] = None,**fit_kwargs,):
class HoltWintersModel(PerSegmentModelMixin,NonPredictionIntervalContextIgnorantModelMixin,NonPredictionIntervalContextIgnorantAbstractModel,):def __init__(self,trend: Optional[str] = None,damped_trend: bool = False,seasonal: Optional[str] = None,seasonal_periods: Optional[int] = None,initialization_method: str = "estimated",initial_level: Optional[float] = None,initial_trend: Optional[float] = None,initial_seasonal: Optional[Sequence[float]] = None,use_boxcox: Union[bool, str, float] = False,bounds: Optional[Dict[str, Tuple[float, float]]] = None,dates: Optional[Sequence[datetime]] = None,freq: Optional[str] = None,missing: str = "none",smoothing_level: Optional[float] = None,smoothing_trend: Optional[float] = None,smoothing_seasonal: Optional[float] = None,damping_trend: Optional[float] = None,**fit_kwargs,):
class HoltModel(PerSegmentModelMixin,NonPredictionIntervalContextIgnorantModelMixin,NonPredictionIntervalContextIgnorantAbstractModel,):def __init__(self,exponential: bool = False,damped_trend: bool = False,initialization_method: str = "estimated",initial_level: Optional[float] = None,initial_trend: Optional[float] = None,smoothing_level: Optional[float] = None,smoothing_trend: Optional[float] = None,damping_trend: Optional[float] = None,**fit_kwargs,):
class LinearPerSegmentModel(PerSegmentModelMixin,NonPredictionIntervalContextIgnorantModelMixin,NonPredictionIntervalContextIgnorantAbstractModel,):def __init__(self, fit_intercept: bool = True, **kwargs):
class LinearMultiSegmentModel(MultiSegmentModelMixin,NonPredictionIntervalContextIgnorantModelMixin,NonPredictionIntervalContextIgnorantAbstractModel,):def __init__(self, fit_intercept: bool = True, **kwargs):
class ElasticMultiSegmentModel(MultiSegmentModelMixin,NonPredictionIntervalContextIgnorantModelMixin,NonPredictionIntervalContextIgnorantAbstractModel,):def __init__(self, alpha: float = 1.0, l1_ratio: float = 0.5, fit_intercept: bool = True, **kwargs):
class ElasticPerSegmentModel(PerSegmentModelMixin,NonPredictionIntervalContextIgnorantModelMixin,NonPredictionIntervalContextIgnorantAbstractModel,):def __init__(self, alpha: float = 1.0, l1_ratio: float = 0.5, fit_intercept: bool = True, **kwargs):
class CatBoostPerSegmentModel(PerSegmentModelMixin,NonPredictionIntervalContextIgnorantModelMixin,NonPredictionIntervalContextIgnorantAbstractModel,):def __init__(self,iterations: Optional[int] = None,depth: Optional[int] = None,learning_rate: Optional[float] = None,logging_level: Optional[str] = "Silent",l2_leaf_reg: Optional[float] = None,thread_count: Optional[int] = None,**kwargs,):
class CatBoostMultiSegmentModel(MultiSegmentModelMixin,NonPredictionIntervalContextIgnorantModelMixin,NonPredictionIntervalContextIgnorantAbstractModel,):def __init__(self,iterations: Optional[int] = None,depth: Optional[int] = None,learning_rate: Optional[float] = None,logging_level: Optional[str] = "Silent",l2_leaf_reg: Optional[float] = None,thread_count: Optional[int] = None,**kwargs,):
class SARIMAXModel(PerSegmentModelMixin, PredictionIntervalContextIgnorantModelMixin, PredictionIntervalContextIgnorantAbstractModel):def __init__(self,order: Tuple[int, int, int] = (1, 0, 0),seasonal_order: Tuple[int, int, int, int] = (0, 0, 0, 0),trend: Optional[str] = None,measurement_error: bool = False,time_varying_regression: bool = False,mle_regression: bool = True,simple_differencing: bool = False,enforce_stationarity: bool = True,enforce_invertibility: bool = True,hamilton_representation: bool = False,concentrate_scale: bool = False,trend_offset: float = 1,use_exact_diffuse: bool = False,dates: Optional[List[datetime]] = None,freq: Optional[str] = None,missing: str = "none",validate_specification: bool = True,**kwargs,):
class MovingAverageModel(SeasonalMovingAverageModel):def __init__(self, window: int = 5):
class NaiveModel(SeasonalMovingAverageModel):def __init__(self, lag: int = 1):
class ProphetModel(PerSegmentModelMixin, PredictionIntervalContextIgnorantModelMixin, PredictionIntervalContextIgnorantAbstractModel):def __init__(self,growth: str = "linear",changepoints: Optional[List[datetime]] = None,n_changepoints: int = 25,changepoint_range: float = 0.8,yearly_seasonality: Union[str, bool] = "auto",weekly_seasonality: Union[str, bool] = "auto",daily_seasonality: Union[str, bool] = "auto",holidays: Optional[pd.DataFrame] = None,seasonality_mode: str = "additive",seasonality_prior_scale: float = 10.0,holidays_prior_scale: float = 10.0,changepoint_prior_scale: float = 0.05,mcmc_samples: int = 0,interval_width: float = 0.8,uncertainty_samples: Union[int, bool] = 1000,stan_backend: Optional[str] = None,additional_seasonality_params: Iterable[Dict[str, Union[str, float, int]]] = (),):
class BATSModel(PerSegmentModelMixin, PredictionIntervalContextIgnorantModelMixin, PredictionIntervalContextIgnorantAbstractModel):def __init__(self,use_box_cox: Optional[bool] = None,box_cox_bounds: Tuple[int, int] = (0, 1),use_trend: Optional[bool] = None,use_damped_trend: Optional[bool] = None,seasonal_periods: Optional[Iterable[int]] = None,use_arma_errors: bool = True,show_warnings: bool = True,n_jobs: Optional[int] = None,multiprocessing_start_method: str = "spawn",context: Optional[ContextInterface] = None,):
class TBATSModel(PerSegmentModelMixin, PredictionIntervalContextIgnorantModelMixin, PredictionIntervalContextIgnorantAbstractModel):def __init__(self,use_box_cox: Optional[bool] = None,box_cox_bounds: Tuple[int, int] = (0, 1),use_trend: Optional[bool] = None,use_damped_trend: Optional[bool] = None,seasonal_periods: Optional[Iterable[int]] = None,use_arma_errors: bool = True,show_warnings: bool = True,n_jobs: Optional[int] = None,multiprocessing_start_method: str = "spawn",context: Optional[ContextInterface] = None,):
class SklearnPerSegmentModel(PerSegmentModelMixin,NonPredictionIntervalContextIgnorantModelMixin,NonPredictionIntervalContextIgnorantAbstractModel,):def __init__(self, regressor: RegressorMixin):
class SklearnMultiSegmentModel(MultiSegmentModelMixin,NonPredictionIntervalContextIgnorantModelMixin,NonPredictionIntervalContextIgnorantAbstractModel,):def __init__(self, regressor: RegressorMixin):
class AutoARIMAModel(PerSegmentModelMixin, PredictionIntervalContextIgnorantModelMixin, PredictionIntervalContextIgnorantAbstractModel):def __init__(self,**kwargs,):
class DeepBaseNet(DeepAbstractNet, LightningModule):def __init__(self):
class DeepBaseModel(DeepBaseAbstractModel, SaveNNMixin, NonPredictionIntervalContextRequiredAbstractModel):def __init__(self,*,net: DeepBaseNet,encoder_length: int,decoder_length: int,train_batch_size: int,test_batch_size: int,trainer_params: Optional[dict],train_dataloader_params: Optional[dict],test_dataloader_params: Optional[dict],val_dataloader_params: Optional[dict],split_params: Optional[dict],):
class MLPNet(DeepBaseNet):def __init__(self,input_size: int,hidden_size: List[int],lr: float,loss: "torch.nn.Module",optimizer_params: Optional[dict],) -> None:super().__init__()self.input_size = input_sizeself.hidden_size = hidden_sizeself.lr = lrself.loss = lossself.optimizer_params = {} if optimizer_params is None else optimizer_paramslayers = [nn.Linear(in_features=input_size, out_features=hidden_size[0]), nn.ReLU()]for i in range(1, len(hidden_size)):
class MLPModel(DeepBaseModel):def __init__(self,input_size: int,decoder_length: int,hidden_size: List,encoder_length: int = 0,lr: float = 1e-3,loss: Optional["torch.nn.Module"] = None,train_batch_size: int = 16,test_batch_size: int = 16,optimizer_params: Optional[dict] = None,trainer_params: Optional[dict] = None,train_dataloader_params: Optional[dict] = None,test_dataloader_params: Optional[dict] = None,val_dataloader_params: Optional[dict] = None,split_params: Optional[dict] = None,):
class PytorchForecastingDatasetBuilder(BaseMixin):def __init__(self,max_encoder_length: int = 30,min_encoder_length: Optional[int] = None,min_prediction_idx: Optional[int] = None,min_prediction_length: Optional[int] = None,max_prediction_length: int = 1,static_categoricals: Optional[List[str]] = None,static_reals: Optional[List[str]] = None,time_varying_known_categoricals: Optional[List[str]] = None,time_varying_known_reals: Optional[List[str]] = None,time_varying_unknown_categoricals: Optional[List[str]] = None,time_varying_unknown_reals: Optional[List[str]] = None,variable_groups: Optional[Dict[str, List[int]]] = None,constant_fill_strategy: Optional[Dict[str, Union[str, float, int, bool]]] = None,allow_missing_timesteps: bool = True,lags: Optional[Dict[str, List[int]]] = None,add_relative_time_idx: bool = True,add_target_scales: bool = True,add_encoder_length: Union[bool, str] = True,target_normalizer: Union[NORMALIZER, str, List[NORMALIZER], Tuple[NORMALIZER]] = "auto",categorical_encoders: Optional[Dict[str, NaNLabelEncoder]] = None,scalers: Optional[Dict[str, Union[StandardScaler, RobustScaler, TorchNormalizer, EncoderNormalizer]]] = None,):
class TFTModel(_DeepCopyMixin, PytorchForecastingMixin, SaveNNMixin, PredictionIntervalContextRequiredAbstractModel):def __init__(self,decoder_length: Optional[int] = None,encoder_length: Optional[int] = None,dataset_builder: Optional[PytorchForecastingDatasetBuilder] = None,train_batch_size: int = 64,test_batch_size: int = 64,lr: float = 1e-3,hidden_size: int = 16,lstm_layers: int = 1,attention_head_size: int = 4,dropout: float = 0.1,hidden_continuous_size: int = 8,loss: "MultiHorizonMetric" = None,trainer_params: Optional[Dict[str, Any]] = None,quantiles_kwargs: Optional[Dict[str, Any]] = None,**kwargs,):
class RNNNet(DeepBaseNet):def __init__(self,input_size: int,num_layers: int,hidden_size: int,lr: float,loss: "torch.nn.Module",optimizer_params: Optional[dict],) -> None:super().__init__()self.num_layers = num_layersself.input_size = input_sizeself.hidden_size = hidden_sizeself.loss = torch.nn.MSELoss() if loss is None else lossself.rnn = nn.LSTM(num_layers=self.num_layers, hidden_size=self.hidden_size, input_size=self.input_size, batch_first=True)self.projection = nn.Linear(in_features=self.hidden_size, out_features=1)self.lr = lrself.optimizer_params = {} if optimizer_params is None else optimizer_paramsdef forward(self, x: RNNBatch, *args, **kwargs):
class RNNModel(DeepBaseModel):def __init__(self,input_size: int,decoder_length: int,encoder_length: int,num_layers: int = 2,hidden_size: int = 16,lr: float = 1e-3,loss: Optional["torch.nn.Module"] = None,train_batch_size: int = 16,test_batch_size: int = 16,optimizer_params: Optional[dict] = None,trainer_params: Optional[dict] = None,train_dataloader_params: Optional[dict] = None,test_dataloader_params: Optional[dict] = None,val_dataloader_params: Optional[dict] = None,split_params: Optional[dict] = None,):
class DeepARModel(_DeepCopyMixin, PytorchForecastingMixin, SaveNNMixin, PredictionIntervalContextRequiredAbstractModel):def __init__(self,decoder_length: Optional[int] = None,encoder_length: Optional[int] = None,dataset_builder: Optional[PytorchForecastingDatasetBuilder] = None,train_batch_size: int = 64,test_batch_size: int = 64,lr: float = 1e-3,cell_type: str = "LSTM",hidden_size: int = 10,rnn_layers: int = 2,dropout: float = 0.1,loss: Optional["DistributionLoss"] = None,trainer_params: Optional[Dict[str, Any]] = None,quantiles_kwargs: Optional[Dict[str, Any]] = None,):
class LDS(BaseMixin):def __init__(self,emission_coeff: Tensor,# (batch_size, seq_length, latent_dim)transition_coeff: Tensor,# (latent_dim, latent_dim)innovation_coeff: Tensor,# (batch_size, seq_length, latent_dim)noise_std: Tensor,# (batch_size, seq_length, 1)prior_mean: Tensor,# (batch_size, latent_dim)prior_cov: Tensor,# (batch_size, latent_dim, latent_dim)offset: Tensor,# (batch_size, seq_length, 1)seq_length: int,latent_dim: int,):
class CompositeSSM(SSM):def __init__(self, seasonal_ssms: List[SeasonalitySSM], nonseasonal_ssm: Optional[Union[LevelSSM, LevelTrendSSM]] = None):
class DaylySeasonalitySSM(SeasonalitySSM):def __init__(self):
class WeeklySeasonalitySSM(SeasonalitySSM):def __init__(self):
class YearlySeasonalitySSM(SeasonalitySSM):def __init__(self):
class SeasonalitySSM(LevelSSM):def __init__(self, num_seasons: int, timestamp_transform: Callable[[pd.Timestamp], int]):
class DeepStateModel(DeepBaseModel):def __init__(self,ssm: CompositeSSM,input_size: int,encoder_length: int,decoder_length: int,num_layers: int = 1,n_samples: int = 5,lr: float = 1e-3,train_batch_size: int = 16,test_batch_size: int = 16,optimizer_params: Optional[dict] = None,trainer_params: Optional[dict] = None,train_dataloader_params: Optional[dict] = None,test_dataloader_params: Optional[dict] = None,val_dataloader_params: Optional[dict] = None,split_params: Optional[dict] = None,):
class DeepStateNet(DeepBaseNet):def __init__(self,ssm: CompositeSSM,input_size: int,num_layers: int,n_samples: int,lr: float,optimizer_params: Optional[dict],):
class ReversiblePerSegmentWrapper(PerSegmentWrapper, ReversibleTransform):def __init__(self, transform: OneSegmentTransform, required_features: Union[Literal["all"], List[str]]):
class Transform(SaveMixin, BaseMixin):def __init__(self, required_features: Union[Literal["all"], List[str]]):
class IrreversibleTransform(Transform):def __init__(self, required_features: Union[Literal["all"], List[str]]):
class PerSegmentWrapper(Transform):def __init__(self, transform: OneSegmentTransform, required_features: Union[Literal["all"], List[str]]):
class ReversibleTransform(Transform):def __init__(self, required_features: Union[Literal["all"], List[str]]):
class IrreversiblePerSegmentWrapper(PerSegmentWrapper, IrreversibleTransform):def __init__(self, transform: OneSegmentTransform, required_features: Union[Literal["all"], List[str]]):
class ResampleWithDistributionTransform(IrreversiblePerSegmentWrapper):def __init__(self, in_column: str, distribution_column: str, inplace: bool = True, out_column: Optional[str] = None):
class TimeSeriesImputerTransform(ReversibleTransform):def __init__(self,in_column: str = "target",strategy: str = ImputerMode.constant,window: int = -1,seasonality: int = 1,default_value: Optional[float] = None,constant_value: float = 0,):
class FilterFeaturesTransform(ReversibleTransform):def __init__(self,include: Optional[Sequence[str]] = None,exclude: Optional[Sequence[str]] = None,return_features: bool = False,):
class MRMRFeatureSelectionTransform(BaseFeatureSelectionTransform):def __init__(self,relevance_table: RelevanceTable,top_k: int,features_to_use: Union[List[str], Literal["all"]] = "all",fast_redundancy: bool = False,relevance_aggregation_mode: str = AggregationMode.mean,redundancy_aggregation_mode: str = AggregationMode.mean,atol: float = 1e-10,return_features: bool = False,**relevance_params,):
class TreeFeatureSelectionTransform(BaseFeatureSelectionTransform):def __init__(self,model: Union[Literal["catboost"], Literal["random_forest"], TreeBasedRegressor],top_k: int,features_to_use: Union[List[str], Literal["all"]] = "all",return_features: bool = False,):
class SegmentGaleShapley(BaseGaleShapley):def __init__(self, name: str, ranked_candidates: List[str]):
class GaleShapleyFeatureSelectionTransform(BaseFeatureSelectionTransform):def __init__(self,relevance_table: RelevanceTable,top_k: int,features_to_use: Union[List[str], Literal["all"]] = "all",use_rank: bool = False,return_features: bool = False,**relevance_params,):
class BaseGaleShapley(BaseMixin):def __init__(self, name: str, ranked_candidates: List[str]):
class GaleShapleyMatcher(BaseMixin):def __init__(self, segments: List[SegmentGaleShapley], features: List[FeatureGaleShapley]):
class BaseFeatureSelectionTransform(ReversibleTransform, ABC):def __init__(self, features_to_use: Union[List[str], Literal["all"]] = "all", return_features: bool = False):
class LogTransform(ReversibleTransform):def __init__(self, in_column: str, base: int = 10, inplace: bool = True, out_column: Optional[str] = None):
class LagTransform(IrreversibleTransform, FutureMixin):def __init__(self, in_column: str, lags: Union[List[int], int], out_column: Optional[str] = None):
class ExogShiftTransform(IrreversibleTransform, FutureMixin):def __init__(self, lag: Union[int, Literal["auto"]], horizon: Optional[int] = None):
class StandardScalerTransform(SklearnTransform):def __init__(self,in_column: Optional[Union[str, List[str]]] = None,inplace: bool = True,out_column: Optional[str] = None,with_mean: bool = True,with_std: bool = True,mode: Union[TransformMode, str] = "per-segment",):
class RobustScalerTransform(SklearnTransform):def __init__(self,in_column: Optional[Union[str, List[str]]] = None,inplace: bool = True,out_column: Optional[str] = None,with_centering: bool = True,with_scaling: bool = True,quantile_range: Tuple[float, float] = (25, 75),unit_variance: bool = False,mode: Union[TransformMode, str] = "per-segment",):
class MaxAbsScalerTransform(SklearnTransform):def __init__(self,in_column: Optional[Union[str, List[str]]] = None,inplace: bool = True,out_column: Optional[str] = None,mode: Union[TransformMode, str] = "per-segment",):
class MinMaxScalerTransform(SklearnTransform):def __init__(self,in_column: Optional[Union[str, List[str]]] = None,inplace: bool = True,out_column: Optional[str] = None,feature_range: Tuple[float, float] = (0, 1),clip: bool = True,mode: Union[TransformMode, str] = "per-segment",):
class DifferencingTransform(ReversibleTransform):def __init__(self,in_column: str,period: int = 1,order: int = 1,inplace: bool = True,out_column: Optional[str] = None,):
class LambdaTransform(ReversibleTransform):def __init__(self,in_column: str,transform_func: Callable[[pd.DataFrame], pd.DataFrame],inplace: bool = True,out_column: Optional[str] = None,inverse_transform_func: Optional[Callable[[pd.DataFrame], pd.DataFrame]] = None,):
class MADTransform(WindowStatisticsTransform):def __init__(self,in_column: str,window: int,seasonality: int = 1,min_periods: int = 1,fillna: float = 0,out_column: Optional[str] = None,):
class MeanTransform(WindowStatisticsTransform):def __init__(self,in_column: str,window: int,seasonality: int = 1,alpha: float = 1,min_periods: int = 1,fillna: float = 0,out_column: Optional[str] = None,):
class StdTransform(WindowStatisticsTransform):def __init__(self,in_column: str,window: int,seasonality: int = 1,min_periods: int = 1,fillna: float = 0,out_column: Optional[str] = None,ddof: int = 1,):
class WindowStatisticsTransform(IrreversibleTransform, ABC):def __init__(self,in_column: str,out_column: str,window: int,seasonality: int = 1,min_periods: int = 1,fillna: float = 0,**kwargs,):
class SumTransform(WindowStatisticsTransform):def __init__(self,in_column: str,window: int,seasonality: int = 1,min_periods: int = 1,fillna: float = 0,out_column: Optional[str] = None,):
class MinTransform(WindowStatisticsTransform):def __init__(self,in_column: str,window: int,seasonality: int = 1,min_periods: int = 1,fillna: float = 0,out_column: Optional[str] = None,):
class MinMaxDifferenceTransform(WindowStatisticsTransform):def __init__(self,in_column: str,window: int,seasonality: int = 1,min_periods: int = 1,fillna: float = 0,out_column: Optional[str] = None,):
class MaxTransform(WindowStatisticsTransform):def __init__(self,in_column: str,window: int,seasonality: int = 1,min_periods: int = 1,fillna: float = 0,out_column: Optional[str] = None,):
class MedianTransform(WindowStatisticsTransform):def __init__(self,in_column: str,window: int,seasonality: int = 1,min_periods: int = 1,fillna: float = 0,out_column: Optional[str] = None,):
class QuantileTransform(WindowStatisticsTransform):def __init__(self,in_column: str,quantile: float,window: int,seasonality: int = 1,min_periods: int = 1,fillna: float = 0,out_column: Optional[str] = None,):
class SklearnTransform(ReversibleTransform):def __init__(self,in_column: Optional[Union[str, List[str]]],out_column: Optional[str],transformer: TransformerMixin,inplace: bool = True,mode: Union[TransformMode, str] = "per-segment",):
class YeoJohnsonTransform(SklearnTransform):def __init__(self,in_column: Optional[Union[str, List[str]]] = None,inplace: bool = True,out_column: Optional[str] = None,standardize: bool = True,mode: Union[TransformMode, str] = "per-segment",):
class BoxCoxTransform(SklearnTransform):def __init__(self,in_column: Optional[Union[str, List[str]]] = None,inplace: bool = True,out_column: Optional[str] = None,standardize: bool = True,mode: Union[TransformMode, str] = "per-segment",):
class AddConstTransform(ReversibleTransform):def __init__(self, in_column: str, value: float, inplace: bool = True, out_column: Optional[str] = None):
class OutliersTransform(ReversibleTransform, ABC):def __init__(self, in_column: str):
class DensityOutliersTransform(OutliersTransform):def __init__(self,in_column: str,window_size: int = 15,distance_coef: float = 3,n_neighbors: int = 3,distance_func: Callable[[float, float], float] = absolute_difference_distance,):
class PredictionIntervalOutliersTransform(OutliersTransform):def __init__(self,in_column: str,model: Union[Literal["prophet"], Literal["sarimax"], Type["ProphetModel"], Type["SARIMAXModel"]],interval_width: float = 0.95,**model_kwargs,):
class MedianOutliersTransform(OutliersTransform):def __init__(self, in_column: str, window_size: int = 10, alpha: float = 3):
class LabelEncoderTransform(IrreversibleTransform):def __init__(self, in_column: str, out_column: Optional[str] = None, strategy: str = ImputerMode.mean):
class OneHotEncoderTransform(IrreversibleTransform):def __init__(self, in_column: str, out_column: Optional[str] = None):
class DateFlagsTransform(IrreversibleTransform, FutureMixin):def __init__(self,day_number_in_week: Optional[bool] = True,day_number_in_month: Optional[bool] = True,day_number_in_year: Optional[bool] = False,week_number_in_month: Optional[bool] = False,week_number_in_year: Optional[bool] = False,month_number_in_year: Optional[bool] = False,season_number: Optional[bool] = False,year_number: Optional[bool] = False,is_weekend: Optional[bool] = True,special_days_in_week: Sequence[int] = (),special_days_in_month: Sequence[int] = (),out_column: Optional[str] = None,):
class TimeFlagsTransform(IrreversibleTransform, FutureMixin):def __init__(self,minute_in_hour_number: bool = True,fifteen_minutes_in_hour_number: bool = False,hour_number: bool = True,half_hour_number: bool = False,half_day_number: bool = False,one_third_day_number: bool = False,out_column: Optional[str] = None,):
class FourierTransform(IrreversibleTransform, FutureMixin):def __init__(self,period: float,order: Optional[int] = None,mods: Optional[Sequence[int]] = None,out_column: Optional[str] = None,):
class SpecialDaysTransform(IrreversiblePerSegmentWrapper, FutureMixin):def __init__(self, find_special_weekday: bool = True, find_special_month_day: bool = True):
class HolidayTransform(IrreversibleTransform, FutureMixin):def __init__(self, iso_code: str = "RUS", out_column: Optional[str] = None):
class TheilSenTrendTransform(ReversiblePerSegmentWrapper):def __init__(self, in_column: str, poly_degree: int = 1, **regression_params):
class LinearTrendTransform(ReversiblePerSegmentWrapper):def __init__(self, in_column: str, poly_degree: int = 1, **regression_params):
class STLTransform(ReversiblePerSegmentWrapper):def __init__(self,in_column: str,period: int,model: Union[str, TimeSeriesModel] = "arima",robust: bool = False,model_kwargs: Optional[Dict[str, Any]] = None,stl_kwargs: Optional[Dict[str, Any]] = None,):
class ChangePointsTrendTransform(ReversibleChangePointsTransform):def __init__(self,in_column: str,change_points_model: Optional[BaseChangePointsModelAdapter] = None,per_interval_model: Optional[PerIntervalModel] = None,):
class TrendTransform(IrreversibleChangePointsTransform):def __init__(self,in_column: str,change_points_model: Optional[BaseChangePointsModelAdapter] = None,per_interval_model: Optional[PerIntervalModel] = None,out_column: Optional[str] = None,):
class ChangePointsSegmentationTransform(IrreversibleChangePointsTransform):def __init__(self,in_column: str,change_points_model: Optional[BaseChangePointsModelAdapter] = None,out_column: Optional[str] = None,):
class MedianPerIntervalModel(StatisticsPerIntervalModel):def __init__(self):
class MeanPerIntervalModel(StatisticsPerIntervalModel):def __init__(self):
class StatisticsPerIntervalModel(PerIntervalModel):def __init__(self, statistics_function: Callable[[np.ndarray], float]):
class ConstantPerIntervalModel(PerIntervalModel):def __init__(self):
class SklearnPreprocessingPerIntervalModel(PerIntervalModel):def __init__(self, preprocessing: TransformerMixin):
class SklearnRegressionPerIntervalModel(PerIntervalModel):def __init__(self, model: Optional[RegressorMixin] = None):
class RupturesChangePointsModel(BaseChangePointsModelAdapter):def __init__(self, change_points_model: BaseEstimator, **change_points_model_predict_params):
```


I'll give you some examples of prompts and the expected results you should generate in the ```EXAMPLES: ...``` section:

```
EXAMPLES:


Example 1:

given prompt:
I need a simple model
expected result:
{'_target_': 'etna.pipeline.Pipeline', 'horizon': '${__aux__.horizon}', 'model': {'_target_': 'etna.models.NaiveModel', 'lag': 1}}

Example 2:

given prompt:
let's try gradient boosting with lag features, segment encoding and some date flags
expected result:
{'_target_': 'etna.pipeline.Pipeline', 'horizon': '${__aux__.horizon}', 'model': {'_target_': 'etna.models.CatBoostMultiSegmentModel'}, 'transforms': [{'_target_': 'etna.transforms.LagTransform', 'in_column': 'target', 'lags': '${shift:${horizon},[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14]}'}, {'_target_': 'etna.transforms.SegmentEncoderTransform'}, {'_target_': 'etna.transforms.DateFlagsTransform', 'day_number_in_week': True, 'is_weekend': True, 'week_number_in_year': True}]}

Example 3:

given prompt:
boosting with data drift detection
expected result:
{'model': {'_target_': 'etna.models.catboost.CatBoostMultiSegmentModel'}, 'transforms': [{'in_column': 'target', 'change_points_model': {'change_points_model': {'_target_': 'ruptures.detection.binseg.Binseg'}, 'change_points_model_predict_params': {'n_bkps': 5}, '_target_': 'etna.transforms.decomposition.change_points_based.change_points_models.ruptures_based.RupturesChangePointsModel'}, '_target_': 'etna.transforms.decomposition.change_points_based.segmentation.ChangePointsSegmentationTransform'}], 'horizon': '${__aux__.horizon}', '_target_': 'etna.pipeline.pipeline.Pipeline'}

```

RULES:

* We can only use models and transforms from the ```PROTOCOLS AND DESCRIPTIONS: ...``` section.
* I insist on using models and transforms from the ```PROTOCOLS AND DESCRIPTIONS: ...``` section!!!!!
* It's all case sensitive, so be careful.
* The 'model' field must use classes from etna.models.
* The 'transforms' field must use classes from etna.transforms.
* You should respect the protocol (constructor signature) of each model and transform.
* We need to use the __aux__.horizon variable in the 'horizon' field and shifts for lagged features.
"""

def etnaGPT(prompt: str, horizon: int):
    # Ask the chat model for a pipeline config matching the user prompt
    completion = openai.ChatCompletion.create(
        model="gpt-3.5-turbo-16k",
        messages=[
            {"role": "system", "content": SYSTEM_CONTEXT},
            {"role": "user", "content": prompt},
        ],
        temperature=0,
    )
    message = completion.choices[0].message["content"]

    # Parse the reply as a Python dict literal
    message = ast.literal_eval(message)

    # Substitute the ${__aux__.horizon} placeholder with the requested horizon
    message = fill_template(message, {"horizon": horizon})

    # Instantiate the pipeline from the `_target_` config via hydra_slayer
    pipe = get_from_params(**message)

    return pipe


print(etnaGPT("Regression with lags, data drift detection and date features", 14))
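For reference, the config-to-pipeline resolution can be exercised without calling the OpenAI API at all: take one of the expected configs from the examples in the system prompt, fill the horizon placeholder with fill_template, and let hydra_slayer build the objects. A minimal sketch using the Example 1 config (assuming etna and hydra_slayer are installed):

from hydra_slayer import get_from_params

from etna.auto.pool.utils import fill_template

# Config taken verbatim from Example 1 of the system prompt
config = {
    "_target_": "etna.pipeline.Pipeline",
    "horizon": "${__aux__.horizon}",
    "model": {"_target_": "etna.models.NaiveModel", "lag": 1},
}

# Resolve the ${__aux__.horizon} placeholder, then instantiate by `_target_`
config = fill_template(config, {"horizon": 14})
pipeline = get_from_params(**config)  # Pipeline with NaiveModel(lag=1), horizon=14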

Test cases

No response

Additional context

No response
