[Minor] Narrow search range of lr-finder, lessen skip-window of lr_suggestion #1643

Merged (2 commits, Aug 30, 2024)
neuralprophet/utils_lightning.py: 10 changes (5 additions, 5 deletions)
@@ -66,7 +66,7 @@ def smooth_loss_and_suggest(lr_finder, window=10):
         )
         raise
     # get the tuner's default suggestion
-    suggestion_default = lr_finder.suggestion(skip_begin=20, skip_end=10)
+    suggestion_default = lr_finder.suggestion(skip_begin=10, skip_end=3)
 
     log.info(f"Learning rate finder ---- default suggestion: {suggestion_default}")
     log.info(f"Learning rate finder ---- steepest: {suggestion_steepest}")
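For context, a rough sketch of what the narrower skip window means, assuming `lr_finder` is the object returned by PyTorch Lightning's learning-rate finder (the same object `smooth_loss_and_suggest` receives). The `skip_begin`/`skip_end` arguments trim points from either end of the recorded loss curve before the suggestion search; this is illustrative only, not code from this PR:

```python
# Illustrative sketch. `lr_finder` is assumed to be a PyTorch Lightning
# _LRFinder result with a populated `results` dict.
lrs = lr_finder.results["lr"]       # learning rates tried during the range test
losses = lr_finder.results["loss"]  # corresponding losses

# Old behaviour: drop the first 20 and last 10 recorded points before searching.
old_suggestion = lr_finder.suggestion(skip_begin=20, skip_end=10)

# New behaviour: keep more of the curve, dropping only 10 points at the start
# and 3 at the end, so short range tests retain enough usable points.
new_suggestion = lr_finder.suggestion(skip_begin=10, skip_end=3)
```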
@@ -271,17 +271,17 @@ def find_learning_rate(model, loader, trainer, train_epochs):
     # Configure the learning rate finder args
     batches_per_epoch = len(loader)
     main_training_total_steps = train_epochs * batches_per_epoch
-    # main_training_total_steps is around 1e3 to 1e6 -> num_training 100 to 400
-    num_training = 100 + int(np.log10(1 + main_training_total_steps / 1000) * 100)
+    # main_training_total_steps is around 1e3 to 1e6 -> num_training 100 to 200
+    num_training = 100 + int(np.log10(1 + main_training_total_steps / 1000) * 30)
     if batches_per_epoch < num_training:
         log.warning(
             f"Learning rate finder: The number of batches per epoch ({batches_per_epoch}) is too small than the required number \
                 for the learning rate finder ({num_training}). The results might not be optimal."
         )
         # num_training = num_batches
     lr_finder_args = {
-        "min_lr": 1e-7,
-        "max_lr": 1e1,
+        "min_lr": 1e-6,
+        "max_lr": 10.0,
         "num_training": num_training,
         "early_stop_threshold": None,
         "mode": "exponential",
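To see how the new scaling behaves, here is a small standalone sketch (not part of the diff) of the updated formula evaluated at a few representative step counts; the hypothetical helper name `num_training_for` is only for illustration. It shows `num_training` now growing from roughly 100 to roughly 190 instead of 100 to 400:

```python
import numpy as np

def num_training_for(main_training_total_steps: int) -> int:
    # Updated formula from this PR: 30x multiplier instead of 100x.
    return 100 + int(np.log10(1 + main_training_total_steps / 1000) * 30)

for total_steps in (1_000, 10_000, 100_000, 1_000_000):
    print(total_steps, num_training_for(total_steps))
# 1000 -> 109, 10000 -> 131, 100000 -> 160, 1000000 -> 190
```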
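Finally, a hedged sketch of how the narrowed search range is typically handed to the Lightning tuner, assuming PyTorch Lightning 2.x; the `trainer`, `model`, `loader`, and `num_training` names are taken from the surrounding function, and this is not the repository's exact call site:

```python
from pytorch_lightning.tuner import Tuner

lr_finder_args = {
    "min_lr": 1e-6,   # was 1e-7
    "max_lr": 10.0,   # was 1e1 (same value, now written as a float literal)
    "num_training": num_training,
    "early_stop_threshold": None,
    "mode": "exponential",  # learning rates spaced on a log scale between min_lr and max_lr
}
# Tuner.lr_find accepts these as keyword arguments and returns the lr_finder
# object whose suggestion() is used in smooth_loss_and_suggest above.
lr_finder = Tuner(trainer).lr_find(model, train_dataloaders=loader, **lr_finder_args)
```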