[Feature] Enable own NN configuration for lagged regressors #1154

Merged · 2 commits · Feb 16, 2023
2 changes: 2 additions & 0 deletions neuralprophet/configure.py
```diff
@@ -383,6 +383,8 @@ class LaggedRegressor:
     as_scalar: bool
     normalize: Union[bool, str]
     n_lags: int
+    num_hidden_layers: Optional[int]
+    d_hidden: Optional[int]
 
     def __post_init__(self):
         if self.reg_lambda is not None:
```
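For orientation, here is a condensed sketch of the resulting config object. The field list is reconstructed from the hunk above; `reg_lambda` is inferred from the `__post_init__` check, and the real class may carry additional fields and validation:

```python
from dataclasses import dataclass
from typing import Optional, Union

@dataclass
class LaggedRegressor:
    reg_lambda: Optional[float]       # regularization scale, validated in __post_init__
    as_scalar: bool                   # use only the last known value as input
    normalize: Union[bool, str]       # True/False, or "auto"
    n_lags: int                       # covariate lag order
    num_hidden_layers: Optional[int]  # None: inherit the model-wide AR-Net setting
    d_hidden: Optional[int]           # None: width derived in time_net.py (see below)
```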
12 changes: 12 additions & 0 deletions neuralprophet/forecaster.py
```diff
@@ -448,6 +448,8 @@ def add_lagged_regressor(
         self,
         names: Union[str, List[str]],
         n_lags: Union[int, np_types.Literal["auto", "scalar"]] = "auto",
+        num_hidden_layers: Optional[int] = None,
+        d_hidden: Optional[int] = None,
         regularization: Optional[float] = None,
         normalize: Union[bool, str] = "auto",
     ):
@@ -463,12 +465,20 @@
             previous regressors time steps to use as input in the predictor (covar order)
             if ``auto``, time steps will be equivalent to the AR order (default)
             if ``scalar``, all the regressors will only use last known value as input
+        num_hidden_layers : int
+            number of hidden layers to include in Lagged-Regressor-Net (defaults to same configuration as AR-Net)
+        d_hidden : int
+            dimension of hidden layers of the Lagged-Regressor-Net. Ignored if ``num_hidden_layers`` == 0.
         regularization : float
             optional scale for regularization strength
         normalize : bool
             optional, specify whether this regressor will be normalized prior to fitting.
             if ``auto``, binary regressors will not be normalized.
         """
+        if num_hidden_layers is None:
+            num_hidden_layers = self.config_model.num_hidden_layers
+        if d_hidden is None:
+            d_hidden = self.config_model.d_hidden
         if n_lags == 0 or n_lags is None:
             n_lags = 0
             log.warning(
@@ -502,6 +512,8 @@
             normalize=normalize,
             as_scalar=only_last_value,
             n_lags=n_lags,
+            num_hidden_layers=num_hidden_layers,
+            d_hidden=d_hidden,
         )
         return self
```
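With this change, a per-regressor network can be requested directly from the public API: passing `None` (the default) keeps the old behavior of inheriting the model-wide AR-Net configuration. A minimal usage sketch, mirroring the updated integration test further down (the toy dataset and `epochs` value are illustrative, not from the PR):

```python
import pandas as pd
from neuralprophet import NeuralProphet

# Toy series in NeuralProphet's expected ds/y layout.
df = pd.DataFrame({"ds": pd.date_range("2023-01-01", periods=120, freq="D")})
df["y"] = df["ds"].dt.dayofyear.astype(float)
df["A"] = df["y"].rolling(7, min_periods=1).mean()

m = NeuralProphet(n_lags=12, epochs=5)  # autoregression enabled
# Give regressor "A" its own covariate net: 4 hidden layers of width 16,
# instead of inheriting the model-wide AR-Net configuration.
m = m.add_lagged_regressor(names="A", n_lags=12, num_hidden_layers=4, d_hidden=16)
metrics = m.fit(df, freq="D")
```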
10 changes: 5 additions & 5 deletions neuralprophet/time_net.py
```diff
@@ -303,17 +303,17 @@ def __init__(
             for covar in self.config_lagged_regressors.keys():
                 covar_net = nn.ModuleList()
                 d_inputs = self.config_lagged_regressors[covar].n_lags
-                for i in range(self.num_hidden_layers):
+                for i in range(self.config_lagged_regressors[covar].num_hidden_layers):
                     d_hidden = (
                         max(
                             4,
                             round(
                                 (self.config_lagged_regressors[covar].n_lags + n_forecasts)
-                                / (2.0 * (num_hidden_layers + 1))
+                                / (2.0 * (self.config_lagged_regressors[covar].num_hidden_layers + 1))
                             ),
                         )
-                        if d_hidden is None
-                        else d_hidden
+                        if self.config_lagged_regressors[covar].d_hidden is None
+                        else self.config_lagged_regressors[covar].d_hidden
                     )
                     covar_net.append(nn.Linear(d_inputs, d_hidden, bias=True))
                     d_inputs = d_hidden
@@ -499,7 +499,7 @@ def covariate(self, lags, name):
             Forecast component of dims (batch, n_forecasts)
         """
         x = lags
-        for i in range(self.num_hidden_layers + 1):
+        for i in range(self.config_lagged_regressors[name].num_hidden_layers + 1):
             if i > 0:
                 x = nn.functional.relu(x)
             x = self.covar_nets[name][i](x)
```
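When a regressor's `d_hidden` is left as `None`, the hunk above derives a layer width that shrinks from the input size toward the output size, with a floor of 4. Restated as a standalone helper (the function name is ours, not part of the PR):

```python
def default_d_hidden(n_lags: int, n_forecasts: int, num_hidden_layers: int) -> int:
    # Mirrors the fallback in time_net.py: the combined input and output
    # size, scaled down by the layer count, never narrower than 4.
    return max(4, round((n_lags + n_forecasts) / (2.0 * (num_hidden_layers + 1))))

# Example: 12 lags, 1 forecast step, 4 hidden layers -> max(4, round(13 / 10)) = 4
assert default_d_hidden(12, 1, 4) == 4
```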
2 changes: 1 addition & 1 deletion tests/test_integration.py
```diff
@@ -378,7 +378,7 @@ def test_lag_reg():
     )
     df["A"] = df["y"].rolling(7, min_periods=1).mean()
     df["B"] = df["y"].rolling(30, min_periods=1).mean()
-    m = m.add_lagged_regressor(names="A")
+    m = m.add_lagged_regressor(names="A", n_lags=12, num_hidden_layers=4, d_hidden=16)
     m = m.add_lagged_regressor(names="B")
     metrics_df = m.fit(df, freq="D")
     future = m.make_future_dataframe(df, n_historic_predictions=10)
```