forecasting operator: bug fix + automlx user specified metric based tuning (#300)
codeloop authored Sep 11, 2023
2 parents 585abad + 9f829a3 commit cef98e8
Showing 4 changed files with 23 additions and 5 deletions.
12 changes: 11 additions & 1 deletion ads/opctl/operator/lowcode/forecast/const.py
@@ -25,4 +25,14 @@ class SupportedMetrics(str, metaclass=ExtendedEnumMeta):
     SMAPE = "smape"
 
 
-MAX_COLUMNS_AUTOMLX = 15
+AUTOMLX_METRIC_MAP = {
+    "smape": "neg_sym_mean_abs_percent_error",
+    "mape": "neg_sym_mean_abs_percent_error",
+    "mase": "neg_mean_abs_scaled_error",
+    "mae": "neg_mean_absolute_error",
+    "mse": "neg_mean_squared_error",
+    "rmse": "neg_root_mean_squared_error",
+}
+
+MAX_COLUMNS_AUTOMLX = 15
+DEFAULT_TRIALS = 10
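
For reference, a minimal sketch of how the new mapping is intended to be used: the key is the metric name from the operator spec, and the SMAPE scorer doubles as the fallback when the metric is unset or not in the map (user_metric below is a hypothetical stand-in for that spec value).

    from ads.opctl.operator.lowcode.forecast.const import AUTOMLX_METRIC_MAP

    user_metric = "mase"  # hypothetical value taken from the operator YAML spec

    # Fall back to the negated symmetric MAPE scorer when the metric is missing
    # or unrecognized, mirroring the default used in automlx.py below.
    score_metric = AUTOMLX_METRIC_MAP.get(user_metric, "neg_sym_mean_abs_percent_error")
    print(score_metric)  # neg_mean_abs_scaled_error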
9 changes: 7 additions & 2 deletions ads/opctl/operator/lowcode/forecast/model/automlx.py
@@ -9,6 +9,7 @@
 import pandas as pd
 import numpy as np
 from ads.common.decorator.runtime_dependency import runtime_dependency
+from ads.opctl.operator.lowcode.forecast.const import AUTOMLX_METRIC_MAP
 from sktime.forecasting.model_selection import temporal_train_test_split
 from ads.opctl import logger
 
@@ -29,7 +30,8 @@ class AutoMLXOperatorModel(ForecastOperatorBaseModel):
         ),
     )
     def _build_model(self) -> pd.DataFrame:
-        import automl
+        from automl import init
+        init(engine="local", check_deprecation_warnings=False)
 
         full_data_dict = self.full_data_dict
 
@@ -59,7 +61,10 @@ def _build_model(self) -> pd.DataFrame:
             "" if y_train.index.is_monotonic else "NOT",
             "monotonic.",
         )
-        model = automl.Pipeline(task="forecasting", n_algos_tuned=n_algos_tuned)
+        model = automl.Pipeline(task="forecasting",
+                                n_algos_tuned=n_algos_tuned,
+                                score_metric=AUTOMLX_METRIC_MAP.get(self.spec.metric,
+                                                                    "neg_sym_mean_abs_percent_error"))
         model.fit(X=y_train.drop(target, axis=1), y=pd.DataFrame(y_train[target]))
         logger.info("Selected model: {}".format(model.selected_model_))
         logger.info(
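
Taken together, the hunks above amount to the call sequence sketched below. This is a hypothetical, condensed illustration rather than the operator code itself: X_train and y_train are placeholder frames, and the literal n_algos_tuned=1 and "rmse" stand in for values the operator derives from its config and spec.

    import automl
    import pandas as pd
    from automl import init

    from ads.opctl.operator.lowcode.forecast.const import AUTOMLX_METRIC_MAP

    init(engine="local", check_deprecation_warnings=False)

    dates = pd.date_range("2023-01-01", periods=30, freq="D")
    y_train = pd.DataFrame({"Sales": range(30)}, index=dates)  # placeholder target frame
    X_train = pd.DataFrame(index=dates)                        # placeholder exogenous frame

    # score_metric now honors the user-specified metric instead of AutoMLx's default.
    model = automl.Pipeline(
        task="forecasting",
        n_algos_tuned=1,  # placeholder; the operator derives this from its config
        score_metric=AUTOMLX_METRIC_MAP.get("rmse", "neg_sym_mean_abs_percent_error"),
    )
    model.fit(X=X_train, y=y_train)
    print(model.selected_model_)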
4 changes: 3 additions & 1 deletion ads/opctl/operator/lowcode/forecast/model/neuralprophet.py
@@ -9,6 +9,7 @@
 import numpy as np
 import optuna
 import pandas as pd
+from neuralprophet import NeuralProphet
 from torch import Tensor
 from torchmetrics.regression import (
     MeanAbsoluteError,
@@ -24,6 +25,7 @@
 )
 from ads.opctl import logger
 
+from ...forecast.const import DEFAULT_TRIALS
 from .. import utils
 from .base_model import ForecastOperatorBaseModel
 
@@ -166,7 +168,7 @@ def objective(trial):
         )
         study.optimize(
             objective,
-            n_trials=self.spec.tuning.n_trials if self.spec.tunning else 10,
+            n_trials=self.spec.tuning.n_trials if self.spec.tuning else DEFAULT_TRIALS,
             n_jobs=-1,
         )
 
3 changes: 2 additions & 1 deletion ads/opctl/operator/lowcode/forecast/model/prophet.py
@@ -15,6 +15,7 @@
 
 from ads.opctl import logger
 
+from ...forecast.const import DEFAULT_TRIALS
 from .. import utils
 from .base_model import ForecastOperatorBaseModel
 
@@ -144,7 +145,7 @@ def objective(trial):
         )
         study.optimize(
             objective,
-            n_trials=self.spec.tuning.n_trials if self.spec.tunning else 10,
+            n_trials=self.spec.tuning.n_trials if self.spec.tuning else DEFAULT_TRIALS,
             n_jobs=-1,
         )
 
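
This hunk and the matching one in neuralprophet.py fix the same bug: self.spec.tunning was a typo for self.spec.tuning, and the hard-coded 10 now comes from the shared DEFAULT_TRIALS constant. A standalone sketch of the fallback behavior, using a hypothetical stand-in spec object and a toy Optuna objective:

    import optuna

    from ads.opctl.operator.lowcode.forecast.const import DEFAULT_TRIALS

    class _Spec:
        tuning = None  # stand-in for an operator spec with no tuning section

    spec = _Spec()

    def objective(trial):
        x = trial.suggest_float("x", -10, 10)  # toy search space
        return x ** 2

    study = optuna.create_study(direction="minimize")
    study.optimize(
        objective,
        # honor the user-requested trial count when tuning is configured,
        # otherwise fall back to DEFAULT_TRIALS (10)
        n_trials=spec.tuning.n_trials if spec.tuning else DEFAULT_TRIALS,
        n_jobs=-1,
    )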
