BERT pre-training mp fork to spawn #6454

Merged
examples/nlp/language_modeling/megatron_bert_pretraining.py (3 additions, 0 deletions)
@@ -12,6 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+import torch.multiprocessing as mp
 from omegaconf.omegaconf import OmegaConf, open_dict
 from pytorch_lightning import Trainer
 from pytorch_lightning.plugins.environments import TorchElasticEnvironment
@@ -28,6 +29,8 @@
 from nemo.utils import logging
 from nemo.utils.exp_manager import exp_manager
 
+mp.set_start_method("spawn", force=True)
+
 
 @hydra_runner(config_path="conf", config_name="megatron_bert_config")
 def main(cfg) -> None:
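Setting the start method at module import, before the Hydra entry point runs, means DataLoader workers are spawned rather than forked; forked workers inherit the parent process's CUDA context, which CUDA does not support, and force=True avoids a RuntimeError if a start method was already set (for example when a spawned worker re-imports the module). A minimal standalone sketch of the same pattern, assuming only stock PyTorch and not taken from this PR:

import torch
import torch.multiprocessing as mp

# Set the start method once, at import time, before any worker processes exist.
# force=True silences "context has already been set" on re-import in spawned workers.
mp.set_start_method("spawn", force=True)


def main():
    # Any picklable map-style dataset works here; range() is enough for a sketch.
    loader = torch.utils.data.DataLoader(range(8), batch_size=4, num_workers=2)
    for batch in loader:
        print(batch)


if __name__ == "__main__":
    # The __main__ guard is required with "spawn": workers re-import this module.
    main()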
@@ -643,7 +643,11 @@ def build_pretraining_data_loader(self, dataset, consumed_samples):
 
         # Torch dataloader.
         return torch.utils.data.DataLoader(
-            dataset, batch_sampler=batch_sampler, num_workers=self.cfg.data.num_workers, pin_memory=True,
+            dataset,
+            batch_sampler=batch_sampler,
+            num_workers=self.cfg.data.num_workers,
+            pin_memory=True,
+            persistent_workers=True if self.cfg.data.num_workers > 0 else False,
         )
 
     def setup_training_data(self, cfg):
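The conditional is not cosmetic: torch.utils.data.DataLoader raises a ValueError when persistent_workers=True is combined with num_workers=0, a common setting in CPU-only or test configs. A small check of that behavior, assuming nothing beyond stock PyTorch:

import torch

# DataLoader rejects persistent workers when there are no worker processes.
try:
    torch.utils.data.DataLoader(range(4), num_workers=0, persistent_workers=True)
except ValueError as err:
    print(err)  # e.g. "persistent_workers option needs num_workers > 0"

# The guarded form from the diff is accepted for both settings; the ternary is
# equivalent to passing the boolean expression directly.
num_workers = 0
loader = torch.utils.data.DataLoader(
    range(4),
    num_workers=num_workers,
    persistent_workers=num_workers > 0,
)
print(len(list(loader)))  # 4 items, loaded in the main process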
@@ -795,7 +795,7 @@ def build_pretraining_data_loader(
             batch_sampler=batch_sampler,
             num_workers=self.cfg.data.num_workers,
             pin_memory=True,
-            persistent_workers=True,
+            persistent_workers=True if self.cfg.data.num_workers > 0 else False,
         )
 
     def setup(self, stage=None):
@@ -604,7 +604,9 @@ def build_virtual_prompt_dataset(
             drop_last=drop_last,
             num_workers=num_workers,
             pin_memory=pin_memory,
-            persistent_workers=True,  # (@adithyare and @eharper) We need this to make spawn=True to work.
+            persistent_workers=True
+            if num_workers > 0
+            else False,  # (@adithyare and @eharper) We need this to make spawn=True to work.
         )
 
         return dataset, dataloader
@@ -440,7 +440,9 @@ def build_virtual_prompt_dataset(
             drop_last=drop_last,
             num_workers=num_workers,
             pin_memory=pin_memory,
-            persistent_workers=True,  # (@adithyare and @eharper) We need to set this to True to get around issues with spawn=True
+            persistent_workers=True
+            if num_workers > 0
+            else False,  # (@adithyare and @eharper) We need to set this to True to get around issues with spawn=True
         )
         print('build success', len(dataloader), dataset_paths)
         return dataset, dataloader
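The in-code comment says persistent workers are needed for spawn=True to work in prompt learning. One observable effect, sketched below with a toy dataset that is illustrative only and not code from this PR, is that spawned workers are created once and reused across epochs instead of paying the module re-import cost every epoch:

import time

import torch
import torch.multiprocessing as mp

mp.set_start_method("spawn", force=True)


def time_epochs(persistent: bool) -> float:
    loader = torch.utils.data.DataLoader(
        range(64), num_workers=2, persistent_workers=persistent
    )
    start = time.time()
    for _ in range(3):    # three "epochs"
        for _ in loader:  # with persistent=False, workers are re-spawned here each epoch
            pass
    return time.time() - start


if __name__ == "__main__":
    print("persistent_workers=True :", time_epochs(True))
    print("persistent_workers=False:", time_epochs(False))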
@@ -430,7 +430,7 @@ def _setup_eval_dataloader_from_config(self, cfg: DictConfig, dataset):
                     pin_memory=cfg.get("pin_memory", False),
                     drop_last=cfg.get("drop_last", False),
                     shuffle=False,
-                    persistent_workers=True,
+                    persistent_workers=True if cfg.get("num_workers", 0) > 0 else False,
                 )
             )
 
@@ -592,7 +592,7 @@ def _setup_megatron_dataloader_from_config(self, cfg, dataset, consumed_samples):
             collate_fn=collate_fn,
             num_workers=cfg.num_workers,
             pin_memory=cfg.pin_memory,
-            persistent_workers=True,
+            persistent_workers=True if cfg.num_workers > 0 else False,
         )
 
     def process_global_batch_for_text_translation_datasets(self, batch):