From b97c3d113242be44756a5340329b7e7503cef1b3 Mon Sep 17 00:00:00 2001
From: leej3
Date: Mon, 4 Dec 2023 08:42:24 -0500
Subject: [PATCH] move hint to correct location

---
 ignite/handlers/lr_finder.py | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)

diff --git a/ignite/handlers/lr_finder.py b/ignite/handlers/lr_finder.py
index a224f673ae1..2b3e58c05ae 100644
--- a/ignite/handlers/lr_finder.py
+++ b/ignite/handlers/lr_finder.py
@@ -162,18 +162,18 @@ def _log_lr_and_loss(self, trainer: Engine, output_transform: Callable, smooth_f
                         "if output of the engine is torch.Tensor, then "
                         "it must be 0d torch.Tensor or 1d torch.Tensor with 1 element, "
                         f"but got torch.Tensor of shape {loss.shape}."
-                        "You may wish to use the output_transform kwarg with the attach method e.g.\n"
-                        """
-                        lr_finder = FastaiLRFinder()
-                        with lr_finder.attach(trainer, output_transform=lambda x:x["train_loss"]) as trainer_with_lr_finder:
-                            trainer_with_lr_finder.run(dataloader_train)
-                        """
                     )
             else:
                 raise TypeError(
                     "output of the engine should be of type float or 0d torch.Tensor "
                     "or 1d torch.Tensor with 1 element, "
                     f"but got output of type {type(loss).__name__}"
+                    ". You may wish to use the output_transform kwarg with the attach method, e.g.\n"
+                    """
+                    lr_finder = FastaiLRFinder()
+                    with lr_finder.attach(trainer, output_transform=lambda x: x["train_loss"]) as trainer_with_lr_finder:
+                        trainer_with_lr_finder.run(dataloader_train)
+                    """
                 )
         loss = idist.all_reduce(loss)
         lr = self._lr_schedule.get_param()
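
Note: the relocated hint targets the case where the engine's process function
returns something other than a float or scalar tensor, most commonly a dict of
metrics. Below is a minimal, self-contained sketch of that scenario for review
purposes; the model, data, and step function are illustrative assumptions, not
part of the patch. Note also that FastaiLRFinder.attach() takes a required
to_save mapping, which the in-message example elides for brevity.

    # Sketch only, not part of the upstream change: a toy setup showing the
    # TypeError path the relocated hint covers.
    import torch
    import torch.nn as nn

    from ignite.engine import Engine
    from ignite.handlers import FastaiLRFinder

    model = nn.Linear(10, 1)
    optimizer = torch.optim.SGD(model.parameters(), lr=1e-4)
    criterion = nn.MSELoss()

    def train_step(engine, batch):
        x, y = batch
        optimizer.zero_grad()
        loss = criterion(model(x), y)
        loss.backward()
        optimizer.step()
        # Returning a dict rather than a float/0d tensor is what raises the
        # TypeError above when no output_transform is supplied.
        return {"train_loss": loss.item()}

    trainer = Engine(train_step)
    # Synthetic stand-in for a real dataloader (any iterable of batches works).
    dataloader_train = [(torch.randn(4, 10), torch.randn(4, 1)) for _ in range(10)]

    lr_finder = FastaiLRFinder()
    # output_transform pulls the scalar loss out of the dict output, so
    # _log_lr_and_loss receives a float and neither branch above is hit.
    with lr_finder.attach(
        trainer,
        {"model": model, "optimizer": optimizer},
        output_transform=lambda x: x["train_loss"],
    ) as trainer_with_lr_finder:
        trainer_with_lr_finder.run(dataloader_train)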