Remove pytorch-lightning (#11306)
* update import in docs

Signed-off-by: Maanu Grover <maanug@nvidia.com>

* update import in tutorials

Signed-off-by: Maanu Grover <maanug@nvidia.com>

* remove pl requirement

Signed-off-by: Maanu Grover <maanug@nvidia.com>

* missed import updates

Signed-off-by: Maanu Grover <maanug@nvidia.com>

---------

Signed-off-by: Maanu Grover <maanug@nvidia.com>
maanug-nv authored Nov 18, 2024
1 parent 168c3e5 commit bc9e6a8
Showing 36 changed files with 38 additions and 39 deletions.
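The change is mechanical across the 36 files: each `pytorch_lightning` import is replaced by its `lightning.pytorch` equivalent, which the unified `lightning` package already provides. A minimal sketch of the before/after pattern (the `Trainer` call is copied from the `tutorials/asr/ASR_with_NeMo.ipynb` hunk below; nothing else is assumed):

```python
# Before this commit (standalone pytorch-lightning package, now dropped from requirements):
# import pytorch_lightning as pl
# from pytorch_lightning import Trainer

# After this commit (same API, namespaced under the unified lightning package):
import lightning.pytorch as pl
from lightning.pytorch import Trainer

# Both import paths resolve to the same class.
assert Trainer is pl.Trainer

# Call sites are unchanged, e.g. from tutorials/asr/ASR_with_NeMo.ipynb:
trainer = pl.Trainer(devices=1, accelerator='gpu', max_epochs=50)
```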
2 changes: 1 addition & 1 deletion docs/source/nlp/punctuation_and_capitalization.rst
@@ -240,7 +240,7 @@ An example of a config file is
 - trainer config
 -
 - Parameters of
-`pytorch_lightning.Trainer <https://pytorch-lightning.readthedocs.io/en/latest/common/trainer.html#trainer-class-api>`_.
+`lightning.pytorch.Trainer <https://pytorch-lightning.readthedocs.io/en/latest/common/trainer.html#trainer-class-api>`_.
 * - **exp_manager**
 - exp manager config
 -
2 changes: 1 addition & 1 deletion docs/source/starthere/fundamentals.rst
@@ -116,7 +116,7 @@ Below is an example training script for our ``ExampleEncDecModel`` model. We hig
 :linenos:
 :emphasize-lines: 10, 11, 12
-import pytorch_lightning as pl
+import lightning.pytorch as pl
 from nemo.collections.path_to_model_class import ExampleEncDecModel
 from nemo.core.config import hydra_runner
4 changes: 2 additions & 2 deletions nemo/collections/llm/recipes/phi3_mini_4k_instruct.py
@@ -14,11 +14,11 @@

 from typing import Callable, Optional

+import lightning.pytorch as pl
 import nemo_run as run
-import pytorch_lightning as pl
 import torch
+from lightning.pytorch.callbacks.callback import Callback
 from megatron.core.distributed import DistributedDataParallelConfig
-from pytorch_lightning.callbacks.callback import Callback

 from nemo import lightning as nl
 from nemo.collections.llm.api import finetune, pretrain
@@ -15,8 +15,8 @@
 import re

 import torch
+from lightning.pytorch.trainer.trainer import Trainer
 from omegaconf.dictconfig import DictConfig
-from pytorch_lightning.trainer.trainer import Trainer
 from torch import Tensor

 from nemo.collections.common.tokenizers.sentencepiece_tokenizer import SentencePieceTokenizer
@@ -25,10 +25,10 @@
 import numpy as np
 import soundfile as sf
 import torch
+from lightning.pytorch.trainer.trainer import Trainer
 from omegaconf import OmegaConf
 from omegaconf.dictconfig import DictConfig
 from omegaconf.omegaconf import open_dict
-from pytorch_lightning.trainer.trainer import Trainer

 import nemo.collections.asr as nemo_asr
 from nemo.collections.asr.metrics.wer import word_error_rate
1 change: 0 additions & 1 deletion requirements/requirements_lightning.txt
@@ -3,7 +3,6 @@ fiddle
 hydra-core>1.3,<=1.3.2
 lightning>2.2.1
 omegaconf<=2.3
-pytorch-lightning>2.2.1
 torchmetrics>=0.11.0
 transformers>=4.45.0
 wandb
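With the `pytorch-lightning>2.2.1` pin removed, `lightning>2.2.1` remains the single source of the Lightning dependency; it already bundles the `lightning.pytorch` namespace that the updated imports rely on. A small sanity check for an environment installed from this file (a sketch; none of this is part of the commit):

```python
# Sketch: verify an environment built from requirements_lightning.txt after this commit.
# Assumes only that lightning>2.2.1 is installed; the standalone pytorch-lightning
# distribution is no longer pulled in by this file (it may still arrive transitively).
import importlib.util

import lightning
import lightning.pytorch as pl

print(lightning.__version__)   # should satisfy >2.2.1
print(pl.Trainer)              # Trainer resolves from the bundled lightning.pytorch namespace
print(importlib.util.find_spec("pytorch_lightning") is None)  # True unless another package installs it
```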
2 changes: 1 addition & 1 deletion tutorials/01_NeMo_Models.ipynb
@@ -984,7 +984,7 @@
 "id": "0TsfmCYthMux"
 },
 "source": [
-"import pytorch_lightning as ptl\n",
+"import lightning.pytorch as ptl\n",
 "from nemo.core import ModelPT\n",
 "from omegaconf import OmegaConf"
 ],
4 changes: 2 additions & 2 deletions tutorials/asr/ASR_CTC_Language_Finetuning.ipynb
@@ -1292,7 +1292,7 @@
 },
 "source": [
 "import torch\n",
-"import pytorch_lightning as ptl\n",
+"import lightning.pytorch as ptl\n",
 "\n",
 "if torch.cuda.is_available():\n",
 " accelerator = 'gpu'\n",
@@ -2088,7 +2088,7 @@
 },
 "source": [
 "import torch\n",
-"import pytorch_lightning as ptl\n",
+"import lightning.pytorch as ptl\n",
 "\n",
 "if torch.cuda.is_available():\n",
 " accelerator = 'gpu'\n",
2 changes: 1 addition & 1 deletion tutorials/asr/ASR_TTS_Tutorial.ipynb
@@ -172,7 +172,7 @@
 "import tempfile\n",
 "\n",
 "from omegaconf import OmegaConf\n",
-"import pytorch_lightning as pl\n",
+"import lightning.pytorch as pl\n",
 "import torch\n",
 "from tqdm.auto import tqdm\n",
 "import wget\n",
2 changes: 1 addition & 1 deletion tutorials/asr/ASR_with_NeMo.ipynb
@@ -619,7 +619,7 @@
 "id": "GUfR6tAK0k2u"
 },
 "source": [
-"import pytorch_lightning as pl\n",
+"import lightning.pytorch as pl\n",
 "trainer = pl.Trainer(devices=1, accelerator='gpu', max_epochs=50)"
 ],
 "execution_count": null,
2 changes: 1 addition & 1 deletion tutorials/asr/ASR_with_Subword_Tokenization.ipynb
@@ -765,7 +765,7 @@
 "id": "3rslHEKeq9qy"
 },
 "source": [
-"import pytorch_lightning as pl\r\n",
+"import lightning.pytorch as pl\r\n",
 "trainer = pl.Trainer(devices=1, accelerator='gpu', max_epochs=50)"
 ],
 "execution_count": null,
2 changes: 1 addition & 1 deletion tutorials/asr/ASR_with_Transducers.ipynb
@@ -754,7 +754,7 @@
 "outputs": [],
 "source": [
 "import torch\n",
-"from pytorch_lightning import Trainer\n",
+"from lightning.pytorch import Trainer\n",
 "\n",
 "if torch.cuda.is_available():\n",
 " accelerator = 'gpu'\n",
2 changes: 1 addition & 1 deletion tutorials/asr/Confidence_Ensembles.ipynb
@@ -214,7 +214,7 @@
 "# check out https://github.com/NVIDIA/NeMo/blob/main/tutorials/asr/ASR_CTC_Language_Finetuning.ipynb\n",
 "# to learn more about finetuning NeMo ASR models\n",
 "from omegaconf import open_dict, OmegaConf\n",
-"from pytorch_lightning import Trainer\n",
+"from lightning.pytorch import Trainer\n",
 "\n",
 "from nemo.collections.asr.models.ctc_bpe_models import EncDecCTCModelBPE\n",
 "import nemo.utils.exp_manager as exp_manager\n",
2 changes: 1 addition & 1 deletion tutorials/asr/Multilang_ASR.ipynb
@@ -1527,7 +1527,7 @@
 "outputs": [],
 "source": [
 "import torch\n",
-"import pytorch_lightning as ptl"
+"import lightning.pytorch as ptl"
 ]
 },
 {
2 changes: 1 addition & 1 deletion tutorials/asr/Self_Supervised_Pre_Training.ipynb
@@ -433,7 +433,7 @@
 },
 "outputs": [],
 "source": [
-"import pytorch_lightning as pl\n",
+"import lightning.pytorch as pl\n",
 "from omegaconf import OmegaConf\n",
 "\n",
 "from nemo.collections.asr.models.ssl_models import SpeechEncDecSelfSupervisedModel\n",
2 changes: 1 addition & 1 deletion tutorials/asr/Speech_Commands.ipynb
@@ -408,7 +408,7 @@
 },
 "source": [
 "import torch\n",
-"import pytorch_lightning as pl"
+"import lightning.pytorch as pl"
 ],
 "execution_count": null,
 "outputs": []
2 changes: 1 addition & 1 deletion tutorials/asr/Transducers_with_HF_Datasets.ipynb
@@ -554,7 +554,7 @@
 "outputs": [],
 "source": [
 "import torch\n",
-"from pytorch_lightning import Trainer\n",
+"from lightning.pytorch import Trainer\n",
 "\n",
 "if torch.cuda.is_available():\n",
 " accelerator = 'gpu'\n",
2 changes: 1 addition & 1 deletion tutorials/asr/Voice_Activity_Detection.ipynb
@@ -425,7 +425,7 @@
 "outputs": [],
 "source": [
 "import torch\n",
-"import pytorch_lightning as pl"
+"import lightning.pytorch as pl"
 ]
 },
 {
2 changes: 1 addition & 1 deletion tutorials/asr/asr_adapters/ASR_with_Adapters.ipynb
@@ -260,7 +260,7 @@
 "source": [
 "import torch\n",
 "from omegaconf import OmegaConf, open_dict\n",
-"from pytorch_lightning import Trainer\n",
+"from lightning.pytorch import Trainer\n",
 "\n",
 "import nemo.collections.asr as nemo_asr"
 ],
2 changes: 1 addition & 1 deletion tutorials/asr/asr_adapters/Multi_Task_Adapters.ipynb
@@ -908,7 +908,7 @@
 "\n",
 "from torch.utils.data import DataLoader, Dataset\n",
 "\n",
-"import pytorch_lightning as L\n",
+"import lightning.pytorch as L\n",
 "\n",
 "from transformers import T5Tokenizer, T5ForConditionalGeneration\n",
 "\n",
@@ -91,7 +91,7 @@
 "import IPython.display as ipd\n",
 "import matplotlib.pyplot as plt\n",
 "import numpy as np\n",
-"import pytorch_lightning as pl\n",
+"import lightning.pytorch as pl\n",
 "import soundfile as sf\n",
 "\n",
 "from omegaconf import OmegaConf, open_dict\n",
@@ -93,7 +93,7 @@
 "import IPython.display as ipd\n",
 "import matplotlib.pyplot as plt\n",
 "import numpy as np\n",
-"import pytorch_lightning as pl\n",
+"import lightning.pytorch as pl\n",
 "import soundfile as sf\n",
 "from pathlib import Path\n",
 "from omegaconf import OmegaConf, open_dict\n",
@@ -981,4 +981,4 @@
 },
 "nbformat": 4,
 "nbformat_minor": 5
-}
+}
2 changes: 1 addition & 1 deletion tutorials/nlp/ITN_with_Thutmose_Tagger.ipynb
@@ -249,7 +249,7 @@
 "\n",
 "import wget \n",
 "import torch\n",
-"import pytorch_lightning as pl\n",
+"import lightning.pytorch as pl\n",
 "from omegaconf import OmegaConf\n",
 "import pandas as pd"
 ]
2 changes: 1 addition & 1 deletion tutorials/nlp/Punctuation_and_Capitalization.ipynb
@@ -72,7 +72,7 @@
 "import os\n",
 "import wget \n",
 "import torch\n",
-"import pytorch_lightning as pl\n",
+"import lightning.pytorch as pl\n",
 "from omegaconf import OmegaConf"
 ]
 },
@@ -74,7 +74,7 @@
 "import os\n",
 "import wget\n",
 "import torch\n",
-"import pytorch_lightning as pl\n",
+"import lightning.pytorch as pl\n",
 "from omegaconf import OmegaConf"
 ]
 },
2 changes: 1 addition & 1 deletion tutorials/nlp/Relation_Extraction-BioMegatron.ipynb
@@ -71,7 +71,7 @@
 "import os\n",
 "import wget\n",
 "import torch\n",
-"import pytorch_lightning as pl\n",
+"import lightning.pytorch as pl\n",
 "from omegaconf import OmegaConf"
 ]
 },
2 changes: 1 addition & 1 deletion tutorials/nlp/Text_Classification_Sentiment_Analysis.ipynb
@@ -58,7 +58,7 @@
 "import os\n",
 "import wget \n",
 "import torch\n",
-"import pytorch_lightning as pl\n",
+"import lightning.pytorch as pl\n",
 "from omegaconf import OmegaConf"
 ]
 },
2 changes: 1 addition & 1 deletion tutorials/nlp/Token_Classification-BioMegatron.ipynb
@@ -45,7 +45,7 @@
 "import os\n",
 "import wget \n",
 "import torch\n",
-"import pytorch_lightning as pl\n",
+"import lightning.pytorch as pl\n",
 "from omegaconf import OmegaConf"
 ]
 },
@@ -94,7 +94,7 @@
 "import os\n",
 "import wget \n",
 "import torch\n",
-"import pytorch_lightning as pl\n",
+"import lightning.pytorch as pl\n",
 "from omegaconf import OmegaConf"
 ],
 "execution_count": null,
2 changes: 1 addition & 1 deletion tutorials/nlp/Zero_Shot_Intent_Recognition.ipynb
@@ -66,7 +66,7 @@
 "from nemo.utils import logging\n",
 "from omegaconf import OmegaConf\n",
 "import pandas as pd\n",
-"import pytorch_lightning as pl\n",
+"import lightning.pytorch as pl\n",
 "import torch\n",
 "import wget "
 ]
2 changes: 1 addition & 1 deletion tutorials/nlp/lora.ipynb
@@ -422,7 +422,7 @@
 "source": [
 "from nemo.collections.nlp.parts.nlp_overrides import NLPDDPStrategy\n",
 "import torch\n",
-"import pytorch_lightning as pl\n",
+"import lightning.pytorch as pl\n",
 "from nemo.collections.nlp.parts.megatron_trainer_builder import MegatronTrainerBuilder\n",
 "\n",
 "# let's modify some trainer configs\n",
2 changes: 1 addition & 1 deletion tutorials/speaker_tasks/Speaker_Diarization_Training.ipynb
@@ -777,7 +777,7 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"import pytorch_lightning as pl\n",
+"import lightning.pytorch as pl\n",
 "from nemo.collections.asr.models import EncDecDiarLabelModel\n",
 "from nemo.utils.exp_manager import exp_manager\n",
 "\n",
@@ -438,7 +438,7 @@
 "outputs": [],
 "source": [
 "import torch\n",
-"import pytorch_lightning as pl"
+"import lightning.pytorch as pl"
 ]
 },
 {
@@ -1636,7 +1636,7 @@
 "outputId": "67209ee3-5161-40dc-a179-83d8219c3d71"
 },
 "source": [
-"import pytorch_lightning as pl\n",
+"import lightning.pytorch as pl\n",
 "from omegaconf import DictConfig\n",
 "import copy\n",
 "\n",
2 changes: 1 addition & 1 deletion tutorials/tts/Tacotron2_Training.ipynb
@@ -178,7 +178,7 @@
 "Let's take a look at the tacotron2.py file\n",
 "\n",
 "```python\n",
-"import pytorch_lightning as pl\n",
+"import lightning.pytorch as pl\n",
 "\n",
 "from nemo.collections.common.callbacks import LogEpochTimeCallback\n",
 "from nemo.collections.tts.models import Tacotron2Model\n",
2 changes: 1 addition & 1 deletion tutorials/tts/Vits_Training.ipynb
@@ -191,7 +191,7 @@
 "Let's take a look at the vits.py file\n",
 "\n",
 "```python\n",
-"import pytorch_lightning as pl\n",
+"import lightning.pytorch as pl\n",
 "\n",
 "from nemo.collections.tts.models.vits import VitsModel\n",
 "from nemo.core.config import hydra_runner\n",
