diff --git a/train.py b/train.py
index 2852652df82..5b07f97787a 100644
--- a/train.py
+++ b/train.py
@@ -40,7 +40,13 @@
         iter_data_time = time.time()  # timer for data loading per iteration
         epoch_iter = 0  # the number of training iterations in current epoch, reset to 0 every epoch
         visualizer.reset()  # reset the visualizer: make sure it saves the results to HTML at least once every epoch
-        model.update_learning_rate()  # update learning rates in the beginning of every epoch.
+
+        # update learning rates in the beginning of every epoch.
+        if epoch != opt.epoch_count:
+            model.update_learning_rate()
+        else:
+            print('learning rate %.7f' % model.optimizers[0].param_groups[0]['lr'])
+
         for i, data in enumerate(dataset):  # inner loop within one epoch
             iter_start_time = time.time()  # timer for computation per iteration
             if total_iters % opt.print_freq == 0: