diff --git a/dashboard/R/fit_model.R b/dashboard/R/fit_model.R index 0703f3b..8d7487d 100644 --- a/dashboard/R/fit_model.R +++ b/dashboard/R/fit_model.R @@ -365,10 +365,9 @@ update_tune_model_parameters <- function(method, params) { } } - # rimappare il parametro tune_elanet - - mth_params <- getOption("tsf.dashboard.methods_params")[["Elastic Net"]] - tune_params <- clean_chr_inv(params$tune_elanet) + mth_params <- getOption("tsf.dashboard.methods_params")[[method]] + prm_name <- grep("_xx_", names(params), value = TRUE) + tune_params <- clean_chr_inv(params[[prm_name]]) is_to_tune <- mth_params %in% tune_params params_list <- map2(mth_params, is_to_tune, get_tune) |> set_names(mth_params) update_params <- c(params, params_list) @@ -521,7 +520,7 @@ fit_model_tuning <- function( # grid_spec <- generate_grid_spec(method, model_spec, grid_size, seed) # tuning - if (n_folds > 10 | grid_size > 25) { + if (n_folds > 20 | grid_size > 25) { doFuture::registerDoFuture() future::plan(strategy = "multisession", workers = parallelly::availableCores() - 1) message("Number of parallel workers: ", future::nbrOfWorkers()) @@ -531,7 +530,7 @@ fit_model_tuning <- function( resamples = cv_splits, grid = params$grid_size, # grid_spec metrics = modeltime::default_forecast_accuracy_metric_set(), - control = tune::control_grid(save_pred = FALSE) + control = tune::control_grid(save_pred = FALSE, allow_par = TRUE) ) future::plan(strategy = "sequential") diff --git a/dashboard/R/generate_forecast.R b/dashboard/R/generate_forecast.R index a68e237..7af535c 100644 --- a/dashboard/R/generate_forecast.R +++ b/dashboard/R/generate_forecast.R @@ -4,14 +4,12 @@ generate_forecast <- function( ensemble_method = NULL ) { - splits <- timetk::time_series_split( - data, date_var = date, - initial = nrow(data) - n_assess, - assess = n_assess, - cumulative = ifelse(assess_type == "Expanding", TRUE, FALSE) - ) + # initial split + splits <- generate_initial_split(data, n_assess, assess_type) train_tbl <- 
training(splits) |> select(-id, -frequency) test_tbl <- testing(splits) |> select(-id, -frequency) + + # future split future_tbl <- future_frame(data, .date_var = date, .length_out = n_future) # model summary diff --git a/dashboard/test.R b/dashboard/test.R index c9ae0fa..2350374 100644 --- a/dashboard/test.R +++ b/dashboard/test.R @@ -209,7 +209,7 @@ input <- list( n_folds = 5, metric = "RMSE", grid_size = 10, - tune_rf = c("rf_mtry") + tune_xx_rf = c("rf_mtry") ) data = data_selected diff --git a/dashboard/todo.txt b/dashboard/todo.txt index 2d41675..321778d 100644 --- a/dashboard/todo.txt +++ b/dashboard/todo.txt @@ -11,8 +11,10 @@ Next steps: - documentazione in alto a destra To Do: -- rimappare tune_elanet ecc +- aggiungere rbf_sigma conditional su svm_rbf - finire update_tune_model_parameters con mapping parametri (occhio, non mtry ma rf_mtry) +- testare flusso optimize fino al forecast +- change split in generate_forecast - fare la funzione parse_parameters per ottenere i nomi da mostrare in UI dal parametro - aggiungere metodi di automl (h2o) diff --git a/dashboard/tsf_dashboard.Rmd b/dashboard/tsf_dashboard.Rmd index e1df624..869d178 100644 --- a/dashboard/tsf_dashboard.Rmd +++ b/dashboard/tsf_dashboard.Rmd @@ -1107,7 +1107,7 @@ conditionalPanel( condition = "input.tune_method == 'Elastic Net'", h5("Algorithm hyperparameters to optimize: "), pickerInput( - inputId = "tune_elanet", label = NULL, + inputId = "tune_xx_elanet", label = NULL, # _xx_ needed to recognize it easily in update_tune_model_parameters() choices = methods_params_cl[["Elastic Net"]], multiple = TRUE, selected = methods_params_cl[["Elastic Net"]][2], options = list("actions-box" = TRUE)