Commit 92cee13

fix tests

ourownstory committed Aug 30, 2024
1 parent 451f249

Showing 8 changed files with 26 additions and 19 deletions.
neuralprophet/configure.py (2 additions, 2 deletions)

@@ -511,8 +511,8 @@ class ConfigLaggedRegressors:
     # List of hidden layers for shared NN across LaggedReg. The default value is ``[]``, which initializes no hidden layers.
     regressors: OrderedDict[LaggedRegressor] = field(init=False)

Check failure on line 512 in neuralprophet/configure.py (GitHub Actions / pyright):
Too few type arguments provided for "OrderedDict"; expected 2 but received 1 (reportInvalidTypeArguments)

-    # def __post_init__(self):
-    #     self.regressors = None
+    def __post_init__(self):
+        self.regressors = None


 @dataclass
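Note on the pyright failure above: it concerns the annotation only. typing.OrderedDict is generic over key and value types, so a two-argument form such as OrderedDict[str, LaggedRegressor] would satisfy the checker. A minimal sketch of the dataclass pattern this hunk uncomments, using stand-in classes rather than neuralprophet's actual definitions:

    # Minimal sketch with stand-in classes, not neuralprophet's actual code.
    from dataclasses import dataclass, field
    from typing import Optional, OrderedDict


    @dataclass
    class LaggedRegressor:  # stand-in for configure.LaggedRegressor
        n_lags: int = 1


    @dataclass
    class ConfigLaggedRegressors:
        # Two type arguments (key, value) would satisfy pyright; field(init=False)
        # keeps the attribute out of the generated __init__.
        regressors: Optional[OrderedDict[str, LaggedRegressor]] = field(init=False)

        def __post_init__(self):
            # The uncommented __post_init__ starts with no regressors configured.
            self.regressors = None


    config = ConfigLaggedRegressors()
    print(config.regressors)  # None until a lagged regressor is added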
neuralprophet/data/process.py (4 additions, 4 deletions)

@@ -101,7 +101,7 @@ def _reshape_raw_predictions_to_forecst_df(
     lagged_components = [
         "ar",
     ]
-    if config_lagged_regressors.regressors is not None:
+    if config_lagged_regressors is not None and config_lagged_regressors.regressors is not None:
         for name in config_lagged_regressors.regressors.keys():
             lagged_components.append(f"lagged_regressor_{name}")
     for comp in lagged_components:
@@ -362,7 +362,7 @@ def _validate_column_name(
     if seasons and config_seasonality is not None:
         if name in config_seasonality.periods:
             raise ValueError(f"Name {name!r} already used for a seasonality.")
-    if covariates and config_lagged_regressors.regressors is not None:
+    if covariates and config_lagged_regressors is not None and config_lagged_regressors.regressors is not None:
         if name in config_lagged_regressors.regressors.keys():
             raise ValueError(f"Name {name!r} already used for an added covariate.")
     if regressors and config_regressors.regressors is not None:
@@ -423,7 +423,7 @@ def _check_dataframe(
             model.config_regressors.regressors.pop(reg)
         if model.config_regressors.regressors is not None and len(model.config_regressors.regressors) == 0:
             model.config_regressors.regressors = None
-    if model.config_lagged_regressors.regressors is not None:
+    if model.config_lagged_regressors is not None and model.config_lagged_regressors.regressors is not None:
         for reg in lag_regressors_to_remove:
             log.warning(f"Removing lagged regressor {reg} because it is not present in the data.")
             model.config_lagged_regressors.regressors.pop(reg)
@@ -528,7 +528,7 @@ def _handle_missing_data(
         data_columns = []
         if n_lags > 0:
             data_columns.append("y")
-        if config_lagged_regressors.regressors is not None:
+        if config_lagged_regressors is not None and config_lagged_regressors.regressors is not None:
             data_columns.extend(config_lagged_regressors.regressors.keys())
         if config_regressors is not None and config_regressors.regressors is not None:
             data_columns.extend(config_regressors.regressors.keys())
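Every process.py hunk applies the same guard: config_lagged_regressors itself can now be None, so dereferencing .regressors unguarded raises AttributeError. A minimal sketch of the failure mode and the fix, with hypothetical stand-in names:

    # Minimal sketch (hypothetical names): why the extra None check matters.
    from typing import List, Optional


    class ConfigLaggedRegressors:
        def __init__(self):
            self.regressors = None  # populated only once covariates are added


    def collect_columns(config_lagged_regressors: Optional[ConfigLaggedRegressors]) -> List[str]:
        data_columns = []
        # Unguarded, config_lagged_regressors.regressors raises
        # AttributeError whenever the config itself is None.
        if config_lagged_regressors is not None and config_lagged_regressors.regressors is not None:
            data_columns.extend(config_lagged_regressors.regressors.keys())
        return data_columns


    print(collect_columns(None))  # [] instead of AttributeError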
neuralprophet/df_utils.py (1 addition, 1 deletion)

@@ -103,7 +103,7 @@ def get_max_num_lags(n_lags: int, config_lagged_regressors: Optional[ConfigLagge
     int
         Maximum number of lags between the autoregression lags and the covariates lags.
     """
-    if config_lagged_regressors.regressors is not None:
+    if config_lagged_regressors is not None and config_lagged_regressors.regressors is not None:
         # log.debug("config_lagged_regressors exists")
         return max([n_lags] + [val.n_lags for key, val in config_lagged_regressors.regressors.items()])
     else:
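As a worked illustration of what the guarded branch in get_max_num_lags computes (simplified stand-ins, not the real config objects): the result is the largest lag window among autoregression and all lagged regressors.

    # Simplified stand-in illustrating the max-lag computation above.
    from collections import OrderedDict


    class Reg:
        def __init__(self, n_lags: int):
            self.n_lags = n_lags


    n_lags = 10  # autoregression lags
    regressors = OrderedDict(temperature=Reg(7), load=Reg(24))

    # Maximum over the AR window and every covariate's window.
    max_lags = max([n_lags] + [val.n_lags for val in regressors.values()])
    print(max_lags)  # 24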
neuralprophet/forecaster.py (2 additions, 0 deletions)

@@ -640,6 +640,8 @@ def add_lagged_regressor(
             config_lagged_regressors=self.config_lagged_regressors,
             config_regressors=self.config_regressors,
         )
+        if self.config_lagged_regressors.regressors is None:
+            self.config_lagged_regressors.regressors = OrderedDict()
         self.config_lagged_regressors.regressors[name] = configure.LaggedRegressor(
             reg_lambda=regularization,
             normalize=normalize,
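The two added lines are a standard lazy-initialization step: because __post_init__ now leaves regressors as None, add_lagged_regressor creates the OrderedDict on first use. A minimal sketch with hypothetical names:

    # Minimal sketch (hypothetical names) of lazy container initialization.
    from collections import OrderedDict


    class Forecaster:
        def __init__(self):
            self.regressors = None  # mirrors config_lagged_regressors.regressors

        def add_lagged_regressor(self, name: str, n_lags: int = 1):
            # Create the container on first use, then insert.
            if self.regressors is None:
                self.regressors = OrderedDict()
            self.regressors[name] = {"n_lags": n_lags}


    m = Forecaster()
    m.add_lagged_regressor("temperature", n_lags=3)
    print(list(m.regressors))  # ['temperature']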
neuralprophet/plot_utils.py (9 additions, 8 deletions)

@@ -190,9 +190,10 @@ def check_if_configured(m, components, error_flag=False):  # move to utils
if "autoregression" in components and not m.config_ar.n_lags > 0:
components.remove("autoregression")
invalid_components.append("autoregression")
if "lagged_regressors" in components and m.config_lagged_regressors is None:
components.remove("lagged_regressors")
invalid_components.append("lagged_regressors")
if "lagged_regressors" in components:
if m.config_lagged_regressors is None or m.config_lagged_regressors.regressors is None:
components.remove("lagged_regressors")
invalid_components.append("lagged_regressors")
if "events" in components and (m.config_events is None and m.config_country_holidays is None):
components.remove("events")
invalid_components.append("events")
@@ -209,7 +210,7 @@ def check_if_configured(m, components, error_flag=False):  # move to utils
     return components


-def get_valid_configuration(  # move to utils
+def get_valid_configuration(
     m, components=None, df_name=None, valid_set=None, validator=None, forecast_in_focus=None, quantile=0.5
 ):
     """Validate and adapt the selected components to be plotted.
@@ -382,7 +383,7 @@ def get_valid_configuration(  # move to utils
if "lagged_regressors" in components:
if validator == "plot_components":
if forecast_in_focus is None:
for name in m.config_lagged_regressors.keys():
for name in m.config_lagged_regressors.regressors.keys():
plot_components.append(
{
"plot_name": f'Lagged Regressor "{name}"',
@@ -392,16 +393,16 @@ def get_valid_configuration(  # move to utils
                         }
                     )
             else:
-                for name in m.config_lagged_regressors.keys():
+                for name in m.config_lagged_regressors.regressors.keys():
                     plot_components.append(
                         {
                             "plot_name": f'Lagged Regressor "{name}" ({forecast_in_focus})-ahead',
                             "comp_name": f"lagged_regressor_{name}{forecast_in_focus}",
                         }
                     )
         elif validator == "plot_parameters":
-            for name in m.config_lagged_regressors.keys():
-                if m.config_lagged_regressors[name].as_scalar:
+            for name in m.config_lagged_regressors.regressors.keys():
+                if m.config_lagged_regressors.regressors[name].as_scalar:
                     lagged_scalar_regressors.append((name, m.model.get_covar_weights()[name].detach().numpy()))
                 else:
                     plot_components.append(
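All of the plot_utils changes follow from the same refactor: the lagged-regressor config used to behave like the OrderedDict itself, and now nests the dict under a .regressors attribute. A before/after sketch with hypothetical names:

    # Minimal sketch (hypothetical names): the dict moved one level down.
    from collections import OrderedDict
    from dataclasses import dataclass, field


    @dataclass
    class ConfigLaggedRegressors:
        regressors: OrderedDict = field(default_factory=OrderedDict)


    config = ConfigLaggedRegressors()
    config.regressors["temperature"] = {"as_scalar": True}

    # Old access pattern (the config was the dict): config.keys()
    # New access pattern: go through .regressors
    for name in config.regressors.keys():
        print(name, config.regressors[name]["as_scalar"])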
neuralprophet/time_dataset.py (6 additions, 2 deletions)

@@ -389,7 +389,9 @@ def tabularize_univariate_datetime_single_index(
inputs["lags"] = lags

# COVARIATES / LAGGED REGRESSORS: Lagged regressor inputs: analogous to LAGS
if config_lagged_regressors.regressors is not None: # and max_lags > 0:
if (
config_lagged_regressors is not None and config_lagged_regressors.regressors is not None
): # and max_lags > 0:
inputs["covariates"] = self.get_sample_lagged_regressors(
df_tensors=df_tensors, origin_index=origin_index, config_lagged_regressors=config_lagged_regressors
)
@@ -651,7 +653,9 @@ def create_nan_mask(
         valid_origins &= y_lags_valid

     # LAGGED REGRESSORS
-    if config_lagged_regressors.regressors is not None:  # and max_lags > 0:
+    if (
+        config_lagged_regressors is not None and config_lagged_regressors.regressors is not None
+    ):  # and max_lags > 0:
         reg_lags_valid = torch.ones(tensor_length, dtype=torch.bool)
         for name, lagged_regressor in config_lagged_regressors.regressors.items():
             n_reg_lags = lagged_regressor.n_lags
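For the create_nan_mask hunk, a small sketch (hypothetical data, not the library's code) of the boolean-mask idiom used there: start with all forecast origins valid, then AND in each regressor's constraint.

    # Sketch (hypothetical data) of the validity-mask idiom.
    import torch

    tensor_length = 8
    valid_origins = torch.ones(tensor_length, dtype=torch.bool)

    # A lagged regressor needing n_reg_lags past values: the first
    # n_reg_lags - 1 origins cannot supply a full lag window.
    n_reg_lags = 3
    reg_lags_valid = torch.ones(tensor_length, dtype=torch.bool)
    reg_lags_valid[: n_reg_lags - 1] = False

    valid_origins &= reg_lags_valid
    print(valid_origins)  # first two origins are masked out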
neuralprophet/time_net.py (1 addition, 1 deletion)

@@ -318,7 +318,7 @@ def get_covar_weights(self, covar_input=None) -> torch.Tensor:
         if self.config_lagged_regressors is not None and self.config_lagged_regressors.regressors is not None:
             # Accumulate the lags of the covariates
             covar_splits = np.add.accumulate(
-                [covar.n_lags for _, covar in self.config_lagged_regressors.items()][:-1]
+                [covar.n_lags for _, covar in self.config_lagged_regressors.regressors.items()][:-1]
             ).tolist()
             # If actual covariates are provided, use them to compute the attributions
             if covar_input is not None:
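As a worked example (hypothetical lag counts) of what the np.add.accumulate line computes: interior split points for a tensor that concatenates each covariate's lag window back to back.

    # Worked example (hypothetical lag counts) of the split computation.
    import numpy as np

    # Three covariates with 3, 5, and 2 lags concatenated into one tensor
    # of width 10. Cumulative sums without the last entry give the interior
    # boundaries to split at: [3, 8].
    n_lags_per_covar = [3, 5, 2]
    covar_splits = np.add.accumulate(n_lags_per_covar[:-1]).tolist()
    print(covar_splits)  # [3, 8]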
tests/test_regularization.py (1 addition, 1 deletion)

@@ -169,7 +169,7 @@ def test_regularization_lagged_regressor():
     lagged_regressors_config = dict(lagged_regressors)

     weights = m.model.get_covar_weights()
-    for name in m.config_lagged_regressors.keys():
+    for name in m.config_lagged_regressors.regressors.keys():
         weight_average = np.average(weights[name].detach().numpy())

         lagged_regressor_weight = lagged_regressors_config[name]