Commit 1ca4932

Use NLPDDPStrategyNotebook in Multitask_Prompt_and_PTuning.ipynb (NVIDIA#8061) (NVIDIA#8062)

Signed-off-by: Abhishree <abhishreetm@gmail.com>
Co-authored-by: Abhishree Thittenamane <47577437+athitten@users.noreply.github.com>
github-actions[bot] and athitten authored Dec 21, 2023
1 parent 0f2877a commit 1ca4932
Showing 1 changed file with 3 additions and 3 deletions.
tutorials/nlp/Multitask_Prompt_and_PTuning.ipynb (6 changes: 3 additions & 3 deletions)

@@ -596,7 +596,7 @@
"source": [
"import torch\n",
"import pytorch_lightning as pl\n",
"from nemo.collections.nlp.parts.nlp_overrides import NLPDDPStrategy\n",
"from nemo.collections.nlp.parts.nlp_overrides import NLPDDPStrategyNotebook\n",
"from pytorch_lightning.plugins.environments import TorchElasticEnvironment\n",
"\n",
"# let's modify some trainer configs\n",
@@ -618,7 +618,7 @@
"os.environ[\"RANK\"] = '0'\n",
"os.environ[\"WORLD_SIZE\"] = '1'\n",
"\n",
"strategy = NLPDDPStrategy(find_unused_parameters=False, no_ddp_communication_hook=True)\n",
"strategy = NLPDDPStrategyNotebook(find_unused_parameters=False, no_ddp_communication_hook=True)\n",
"plugins = [TorchElasticEnvironment()]\n",
"trainer = pl.Trainer(plugins= plugins, strategy=strategy, **config.trainer)\n",
"\n",
@@ -783,4 +783,4 @@
},
"nbformat": 4,
"nbformat_minor": 5
-}
+}
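
For reference, a minimal sketch of how the affected notebook cell reads after this change, assembled from the two hunks above. The trainer-config tweaks hidden by the collapsed diff context are elided, `import os` is added here so the snippet stands alone, and `config` is the configuration object built earlier in the tutorial (assumed here):

import os

import torch
import pytorch_lightning as pl
from nemo.collections.nlp.parts.nlp_overrides import NLPDDPStrategyNotebook
from pytorch_lightning.plugins.environments import TorchElasticEnvironment

# let's modify some trainer configs
# ... (trainer config changes from the surrounding tutorial cells, elided) ...

# single-process environment variables so the notebook runs without torchrun
os.environ["RANK"] = '0'
os.environ["WORLD_SIZE"] = '1'

# this commit swaps NLPDDPStrategy for NLPDDPStrategyNotebook
strategy = NLPDDPStrategyNotebook(find_unused_parameters=False, no_ddp_communication_hook=True)
plugins = [TorchElasticEnvironment()]
# `config.trainer` comes from the config object defined earlier in the tutorial
trainer = pl.Trainer(plugins=plugins, strategy=strategy, **config.trainer)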
