Commit

Merge branch 'master' into release/1.10.0
martins0n authored Jun 15, 2022
2 parents 16fb8e3 + cfa3b65 commit 9c17a2d
Showing 18 changed files with 469 additions and 434 deletions.

CHANGELOG.md (2 changes: 1 addition & 1 deletion)
@@ -10,7 +10,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
### Added
-
-
- -
+ - Jupyter extension for black ([#742](https://github.com/tinkoff-ai/etna/pull/742))
-
-
-

Makefile (6 changes: 5 additions & 1 deletion)
@@ -1,4 +1,4 @@
- lint: isort-check black-check flake8-check mypy-check spell-check imported-deps-check
+ lint: isort-check black-check flake8-check mypy-check spell-check imported-deps-check notebooks-check

isort-check:
isort --skip etna/libs --sl -c etna/
@@ -22,11 +22,15 @@ spell-check:
imported-deps-check:
python -m scripts.check_imported_dependencies

+ notebooks-check:
+ black --check examples/*.ipynb
+
format:
isort --skip etna/libs --sl etna/
isort --skip etna/libs --sl tests/
black etna/
black tests/
+ black examples/*.ipynb
flake8 --exclude etna/libs etna/
flake8 --exclude etna/libs tests/ --select E,W,C,F401,N
mypy

etna/analysis/outliers/hist_outliers.py (4 changes: 2 additions & 2 deletions)
@@ -34,9 +34,9 @@ def optimal_sse(left: int, right: int, p: np.ndarray, pp: np.ndarray) -> float:
"""
if left == 0:
avg = p[right]
- return pp[right] - avg ** 2 / (right - left + 1)
+ return pp[right] - avg**2 / (right - left + 1)
avg = p[right] - p[left - 1]
- return pp[right] - pp[left - 1] - avg ** 2 / (right - left + 1)
+ return pp[right] - pp[left - 1] - avg**2 / (right - left + 1)


@numba.jit(nopython=True)
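
The two changes above only adjust exponent spacing, but the formula is easy to sanity-check. Assuming p and pp are running prefix sums of the series and of its squares (my reading of the signature, not stated in the hunk), the closed form matches a direct SSE computation:

import numpy as np

values = np.array([3.0, 5.0, 4.0, 10.0])
p = np.cumsum(values)      # prefix sums of the series
pp = np.cumsum(values**2)  # prefix sums of the squared series

left, right = 1, 3
avg = p[right] - p[left - 1]  # segment sum (the code calls it avg)
sse_fast = pp[right] - pp[left - 1] - avg**2 / (right - left + 1)

segment = values[left : right + 1]
sse_direct = np.sum((segment - segment.mean()) ** 2)
assert np.isclose(sse_fast, sse_direct)  # about 20.67 either way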

etna/clustering/distances/base.py (2 changes: 1 addition & 1 deletion)
@@ -18,7 +18,7 @@
class Distance(ABC, BaseMixin):
"""Base class for distances between series."""

- def __init__(self, trim_series: bool = False, inf_value: float = sys.float_info.max // 10 ** 200):
+ def __init__(self, trim_series: bool = False, inf_value: float = sys.float_info.max // 10**200):
"""Init Distance.
Parameters
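
For context on the touched default: inf_value is a large but finite float that presumably stands in for an infinite distance. A quick illustrative check of its magnitude (not part of the diff):

import sys

# sys.float_info.max is about 1.8e308, so the default sentinel is roughly 1.8e108:
# finite, yet far larger than any realistic distance value.
print(sys.float_info.max // 10**200)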

etna/transforms/math/statistics.py (2 changes: 1 addition & 1 deletion)
@@ -164,7 +164,7 @@ def transform(self, df: pd.DataFrame) -> pd.DataFrame:
dataframe with results
"""
window = self.window if self.window != -1 else len(df)
- self._alpha_range = np.array([self.alpha ** i for i in range(window)])
+ self._alpha_range = np.array([self.alpha**i for i in range(window)])
self._alpha_range = np.expand_dims(self._alpha_range, axis=0) # (1, window)
return super().transform(df)

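
The touched line builds the exponentially decaying weights that the transform applies over its window; a standalone sketch of just that computation, with illustrative alpha and window values:

import numpy as np

alpha, window = 0.5, 4
alpha_range = np.array([alpha**i for i in range(window)])  # [1.0, 0.5, 0.25, 0.125]
alpha_range = np.expand_dims(alpha_range, axis=0)          # shape (1, 4), ready to broadcast over rows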

examples/EDA.ipynb (7 changes: 4 additions & 3 deletions)
@@ -43,6 +43,7 @@
"outputs": [],
"source": [
"import warnings\n",
"\n",
"warnings.filterwarnings(\"ignore\")"
]
},
@@ -609,7 +610,7 @@
" distribution_plot,\n",
" sample_acf_plot,\n",
" sample_pacf_plot,\n",
" plot_correlation_matrix\n",
" plot_correlation_matrix,\n",
")"
]
},
@@ -773,7 +774,7 @@
"metadata": {},
"outputs": [],
"source": [
"lags = LagTransform(in_column=\"target\", lags=[1,7], out_column=\"lag\")\n",
"lags = LagTransform(in_column=\"target\", lags=[1, 7], out_column=\"lag\")\n",
"ts.fit_transform([lags])"
]
},
@@ -797,7 +798,7 @@
}
],
"source": [
"plot_correlation_matrix(ts, segments=[\"segment_a\",\"segment_b\"], method=\"spearman\", vmin=0.5, vmax=1)"
"plot_correlation_matrix(ts, segments=[\"segment_a\", \"segment_b\"], method=\"spearman\", vmin=0.5, vmax=1)"
]
},
{
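
For readers skimming the reformatted cells, here is a condensed, self-contained version of the LagTransform step. The toy dataframe is hypothetical (the hunk does not show the data the notebook loads), and the lag_1 / lag_7 output names are inferred from the target_lag_{...} pattern used in NN_examples.ipynb below:

import pandas as pd

from etna.datasets import TSDataset
from etna.transforms import LagTransform

# Hypothetical single-segment frame in etna's long format.
df = pd.DataFrame(
    {
        "timestamp": pd.date_range("2021-01-01", periods=30, freq="D"),
        "segment": "segment_a",
        "target": range(30),
    }
)
ts = TSDataset(TSDataset.to_dataset(df), freq="D")

lags = LagTransform(in_column="target", lags=[1, 7], out_column="lag")
ts.fit_transform([lags])
print(ts.to_pandas().head())  # expect extra feature columns lag_1 and lag_7 for the segment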

examples/NN_examples.ipynb (37 changes: 24 additions & 13 deletions)
@@ -52,6 +52,7 @@
"from etna.models import SeasonalMovingAverageModel\n",
"\n",
"import warnings\n",
"\n",
"warnings.filterwarnings(\"ignore\")"
]
},
@@ -449,13 +450,17 @@
"\n",
"transform_date = DateFlagsTransform(day_number_in_week=True, day_number_in_month=False, out_column=\"dateflag\")\n",
"num_lags = 10\n",
"transform_lag = LagTransform(in_column=\"target\", lags=[HORIZON+i for i in range(num_lags)], out_column=\"target_lag\")\n",
"transform_lag = LagTransform(\n",
" in_column=\"target\",\n",
" lags=[HORIZON + i for i in range(num_lags)],\n",
" out_column=\"target_lag\",\n",
")\n",
"lag_columns = [f\"target_lag_{HORIZON+i}\" for i in range(num_lags)]\n",
"\n",
"transform_deepar = PytorchForecastingTransform(\n",
" max_encoder_length=HORIZON,\n",
" max_prediction_length=HORIZON,\n",
" time_varying_known_reals=[\"time_idx\"]+lag_columns,\n",
" time_varying_known_reals=[\"time_idx\"] + lag_columns,\n",
" time_varying_unknown_reals=[\"target\"],\n",
" time_varying_known_categoricals=[\"dateflag_day_number_in_week\"],\n",
" target_normalizer=GroupNormalizer(groups=[\"segment\"]),\n",
@@ -483,9 +488,11 @@
"model_deepar = DeepARModel(max_epochs=150, learning_rate=[0.01], gpus=0, batch_size=64)\n",
"metrics = [SMAPE(), MAPE(), MAE()]\n",
"\n",
"pipeline_deepar = Pipeline(model=model_deepar,\n",
" horizon=HORIZON,\n",
" transforms=[transform_lag, transform_date, transform_deepar])"
"pipeline_deepar = Pipeline(\n",
" model=model_deepar,\n",
" horizon=HORIZON,\n",
" transforms=[transform_lag, transform_date, transform_deepar],\n",
")"
]
},
{
@@ -884,7 +891,7 @@
"source": [
"transform_date = DateFlagsTransform(day_number_in_week=True, day_number_in_month=False, out_column=\"dateflag\")\n",
"num_lags = 10\n",
"transform_lag = LagTransform(in_column=\"target\", lags=[HORIZON+i for i in range(num_lags)], out_column=\"target_lag\")\n",
"transform_lag = LagTransform(\n",
" in_column=\"target\",\n",
" lags=[HORIZON + i for i in range(num_lags)],\n",
" out_column=\"target_lag\",\n",
")\n",
"lag_columns = [f\"target_lag_{HORIZON+i}\" for i in range(num_lags)]\n",
"\n",
"transform_tft = PytorchForecastingTransform(\n",
@@ -909,9 +920,11 @@
"\n",
"model_tft = TFTModel(max_epochs=200, learning_rate=[0.01], gpus=0, batch_size=64)\n",
"\n",
"pipeline_tft = Pipeline(model=model_tft,\n",
" horizon=HORIZON,\n",
" transforms=[transform_lag, transform_date, transform_tft])"
"pipeline_tft = Pipeline(\n",
" model=model_tft,\n",
" horizon=HORIZON,\n",
" transforms=[transform_lag, transform_date, transform_tft],\n",
")"
]
},
{
@@ -1326,11 +1339,9 @@
"outputs": [],
"source": [
"model_sma = SeasonalMovingAverageModel(window=5, seasonality=7)\n",
"linear_trend_transform = LinearTrendTransform(in_column='target')\n",
"linear_trend_transform = LinearTrendTransform(in_column=\"target\")\n",
"\n",
"pipeline_sma = Pipeline(model=model_sma,\n",
" horizon=HORIZON,\n",
" transforms=[linear_trend_transform])"
"pipeline_sma = Pipeline(model=model_sma, horizon=HORIZON, transforms=[linear_trend_transform])"
]
},
{
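
Because the DeepAR setup is split across several hunks above, here is the same construction gathered into one sketch. The import paths and the HORIZON value are assumptions (the notebook defines them in cells not shown in this diff); the rest mirrors the reformatted cells:

from pytorch_forecasting.data import GroupNormalizer

from etna.models.nn import DeepARModel
from etna.pipeline import Pipeline
from etna.transforms import DateFlagsTransform, LagTransform, PytorchForecastingTransform

HORIZON = 7  # assumed for illustration; the notebook sets its own horizon
num_lags = 10

transform_date = DateFlagsTransform(day_number_in_week=True, day_number_in_month=False, out_column="dateflag")
transform_lag = LagTransform(
    in_column="target",
    lags=[HORIZON + i for i in range(num_lags)],
    out_column="target_lag",
)
lag_columns = [f"target_lag_{HORIZON + i}" for i in range(num_lags)]

transform_deepar = PytorchForecastingTransform(
    max_encoder_length=HORIZON,
    max_prediction_length=HORIZON,
    time_varying_known_reals=["time_idx"] + lag_columns,
    time_varying_unknown_reals=["target"],
    time_varying_known_categoricals=["dateflag_day_number_in_week"],
    target_normalizer=GroupNormalizer(groups=["segment"]),
)

model_deepar = DeepARModel(max_epochs=150, learning_rate=[0.01], gpus=0, batch_size=64)
pipeline_deepar = Pipeline(
    model=model_deepar,
    horizon=HORIZON,
    transforms=[transform_lag, transform_date, transform_deepar],
)
# pipeline_deepar.fit(ts) followed by pipeline_deepar.forecast() produces the forecast;
# the TFT variant in the later hunks is built the same way with TFTModel and transform_tft.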

examples/backtest.ipynb (22 changes: 9 additions & 13 deletions)
@@ -102,7 +102,7 @@
"source": [
"img = plt.imread(\"./assets/backtest/backtest.jpg\")\n",
"plt.figure(figsize=(15, 10))\n",
"plt.axis('off')\n",
"plt.axis(\"off\")\n",
"_ = plt.imshow(img)"
]
},
@@ -198,7 +198,7 @@
"source": [
"df = pd.read_csv(\"./data/example_dataset.csv\")\n",
"df[\"timestamp\"] = pd.to_datetime(df[\"timestamp\"])\n",
"df = df.loc[df.segment == 'segment_a']\n",
"df = df.loc[df.segment == \"segment_a\"]\n",
"df.head()"
]
},
@@ -218,7 +218,7 @@
"outputs": [],
"source": [
"df = TSDataset.to_dataset(df)\n",
"ts = TSDataset(df, freq='D')"
"ts = TSDataset(df, freq=\"D\")"
]
},
{
@@ -249,7 +249,7 @@
}
],
"source": [
"ts.plot(segments=['segment_a'])"
"ts.plot(segments=[\"segment_a\"])"
]
},
{
@@ -275,9 +275,9 @@
"metadata": {},
"outputs": [],
"source": [
"horizon = 31 # Set the horizon for predictions\n",
"model = ProphetModel() # Create a model\n",
"transforms = [] #A list of transforms - we will not use any of them"
"horizon = 31 # Set the horizon for predictions\n",
"model = ProphetModel() # Create a model\n",
"transforms = [] # A list of transforms - we will not use any of them"
]
},
{
@@ -463,9 +463,7 @@
}
],
"source": [
"metrics_df, forecast_df, fold_info_df = pipeline.backtest(\n",
" ts=ts, metrics=[MAE(), MSE(), SMAPE()]\n",
")"
"metrics_df, forecast_df, fold_info_df = pipeline.backtest(ts=ts, metrics=[MAE(), MSE(), SMAPE()])"
]
},
{
@@ -931,9 +929,7 @@
],
"source": [
"metrics_df, forecast_df, fold_info_df = pipeline.backtest(\n",
" ts=ts,\n",
" metrics=[MAE(), MSE(), SMAPE()],\n",
" aggregate_metrics=True\n",
" ts=ts, metrics=[MAE(), MSE(), SMAPE()], aggregate_metrics=True\n",
")"
]
},
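
Putting the reformatted backtest cells together, the end-to-end flow of the notebook fits in one block (etna 1.x import paths are assumed; the CSV path and all arguments come from the cells above):

import pandas as pd

from etna.datasets import TSDataset
from etna.metrics import MAE, MSE, SMAPE
from etna.models import ProphetModel
from etna.pipeline import Pipeline

df = pd.read_csv("./data/example_dataset.csv")
df["timestamp"] = pd.to_datetime(df["timestamp"])
df = df.loc[df.segment == "segment_a"]
ts = TSDataset(TSDataset.to_dataset(df), freq="D")

horizon = 31  # Set the horizon for predictions
pipeline = Pipeline(model=ProphetModel(), transforms=[], horizon=horizon)

# Returns per-fold metrics, the forecasts for each fold, and information about the folds.
metrics_df, forecast_df, fold_info_df = pipeline.backtest(ts=ts, metrics=[MAE(), MSE(), SMAPE()])

# With aggregate_metrics=True the metrics are averaged over the folds instead.
metrics_agg_df, _, _ = pipeline.backtest(ts=ts, metrics=[MAE(), MSE(), SMAPE()], aggregate_metrics=True)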

0 comments on commit 9c17a2d