reduce change
ourownstory committed Aug 30, 2024
1 parent 2ff07d2 commit a4bd3f2
Showing 1 changed file with 4 additions and 4 deletions.
8 changes: 4 additions & 4 deletions neuralprophet/utils_lightning.py
@@ -271,17 +271,17 @@ def find_learning_rate(model, loader, trainer, train_epochs):
     # Configure the learning rate finder args
     batches_per_epoch = len(loader)
     main_training_total_steps = train_epochs * batches_per_epoch
-    # main_training_total_steps is around 1e3 to 1e6 -> num_training 100 to 400
-    num_training = 100 + int(np.log10(1 + main_training_total_steps / 1000) * 100)
+    # main_training_total_steps is around 1e3 to 1e6 -> num_training 100 to 200
+    num_training = 100 + int(np.log10(1 + main_training_total_steps / 1000) * 30)
     if batches_per_epoch < num_training:
         log.warning(
             f"Learning rate finder: The number of batches per epoch ({batches_per_epoch}) is smaller than the number \
             required by the learning rate finder ({num_training}). The results might not be optimal."
         )
         # num_training = num_batches
     lr_finder_args = {
-        "min_lr": 1e-5,
-        "max_lr": 1.0,
+        "min_lr": 1e-6,
+        "max_lr": 10.0,
         "num_training": num_training,
         "early_stop_threshold": None,
         "mode": "exponential",
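For reference, the effect of the coefficient change (100 to 30) on num_training can be checked with a small standalone sketch; this is not part of the commit, just an arithmetic illustration of the ranges stated in the comments:

import numpy as np

def num_training_old(total_steps):
    # scaling before this commit (coefficient 100)
    return 100 + int(np.log10(1 + total_steps / 1000) * 100)

def num_training_new(total_steps):
    # scaling after this commit (coefficient 30)
    return 100 + int(np.log10(1 + total_steps / 1000) * 30)

for steps in (1e3, 1e4, 1e5, 1e6):
    print(f"{int(steps):>8} steps -> old: {num_training_old(steps)}, new: {num_training_new(steps)}")
# 1e3..1e6 total steps -> old: 130, 204, 300, 400; new: 109, 131, 160, 190

Over the 1e3 to 1e6 step range, num_training now grows from roughly 110 to 190 instead of 130 to 400, matching the updated comment and reducing the cost of the learning rate search.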

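The lr_finder_args above are the keyword arguments of PyTorch Lightning's learning-rate finder; this commit also widens the sweep bounds from [1e-5, 1.0] to [1e-6, 10.0]. Below is a hedged sketch of how such a dict is typically consumed, assuming the surrounding find_learning_rate function passes it to Lightning's Tuner.lr_find; run_lr_finder is a hypothetical wrapper, and the repository's exact call may differ:

from pytorch_lightning.tuner.tuning import Tuner

def run_lr_finder(model, loader, trainer, lr_finder_args):
    # Assumption: PyTorch Lightning 2.x Tuner API; NeuralProphet's actual wiring may differ.
    # Sweeps the learning rate exponentially between min_lr and max_lr over
    # num_training short trial steps, then returns Lightning's suggested value.
    tuner = Tuner(trainer)
    lr_finder = tuner.lr_find(model, train_dataloaders=loader, **lr_finder_args)
    return lr_finder.suggestion() if lr_finder is not None else None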