Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Update defaults for WandbLogger's run name and project name #14145

Merged
merged 11 commits into from
Aug 17, 2022
12 changes: 11 additions & 1 deletion src/pytorch_lightning/CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
- The `Trainer.{fit,validate,test,predict,tune}` methods now raise a useful error message if the input is not a `LightningModule` ([#13892](https://github.com/Lightning-AI/lightning/pull/13892))


- Raised a `MisconfigurationException` if batch transfer hooks are overridden with `IPUAccelerator` ([13961](https://github.com/Lightning-AI/lightning/pull/13961))
- Raised a `MisconfigurationException` if batch transfer hooks are overridden with `IPUAccelerator` ([#13961](https://github.com/Lightning-AI/lightning/pull/13961))


- Updated compatibility for LightningLite to run with the latest DeepSpeed 0.7.0 ([#13967](https://github.com/Lightning-AI/lightning/pull/13967))
Expand All @@ -34,6 +34,12 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
- Replaced the unwrapping logic in strategies with direct access to unwrapped `LightningModule` ([#13738](https://github.com/Lightning-AI/lightning/pull/13738))


- The `WandbLogger.name` property no longer returns the name of the experiment, and instead returns the project's name ([#14145](https://github.com/Lightning-AI/lightning/pull/14145))


- The default project name in `WandbLogger` is now "lightning_logs" ([#14145](https://github.com/Lightning-AI/lightning/pull/14145))


### Deprecated

- Deprecated `LightningDeepSpeedModule` ([#14000](https://github.com/Lightning-AI/lightning/pull/14000))
Expand Down Expand Up @@ -101,9 +107,13 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
- Avoided requiring the FairScale package to use precision with the fsdp native strategy ([#14092](https://github.com/Lightning-AI/lightning/pull/14092))


- Fixed an issue in which the default name for a run in `WandbLogger` would be set to the project name instead of a randomly generated string ([#14145](https://github.com/Lightning-AI/lightning/pull/14145))


- Fixed not preserving set attributes on `DataLoader` and `BatchSampler` when instantiated inside `*_dataloader` hooks ([#14212](https://github.com/Lightning-AI/lightning/pull/14212))



## [1.7.1] - 2022-08-09

### Fixed
Expand Down
13 changes: 7 additions & 6 deletions src/pytorch_lightning/loggers/wandb.py
Original file line number Diff line number Diff line change
Expand Up @@ -260,7 +260,7 @@ def __init__(
id: Optional[str] = None,
anonymous: Optional[bool] = None,
version: Optional[str] = None,
project: Optional[str] = None,
project: str = "lightning_logs",
log_model: Union[str, bool] = False,
experiment: Union[Run, RunDisabled, None] = None,
prefix: str = "",
Expand Down Expand Up @@ -297,7 +297,7 @@ def __init__(
self._checkpoint_callback: Optional["ReferenceType[Checkpoint]"] = None
# set wandb init arguments
self._wandb_init: Dict[str, Any] = dict(
name=name or project,
name=name,
project=project,
id=version or id,
dir=save_dir,
Expand All @@ -306,6 +306,7 @@ def __init__(
)
self._wandb_init.update(**kwargs)
# extract parameters
self._project = self._wandb_init.get("project")
self._save_dir = self._wandb_init.get("dir")
self._name = self._wandb_init.get("name")
self._id = self._wandb_init.get("id")
Expand Down Expand Up @@ -450,13 +451,13 @@ def save_dir(self) -> Optional[str]:

@property
def name(self) -> Optional[str]:
rohitgr7 marked this conversation as resolved.
Show resolved Hide resolved
"""Gets the name of the experiment.
"""The project name of this experiment.

Returns:
The name of the experiment if the experiment exists else the name given to the constructor.
The name of the project the current experiment belongs to. This name is not the same as `wandb.Run`'s
name. To access wandb's internal experiment name, use ``logger.experiment.name`` instead.
"""
# don't create an experiment if we don't have one
return self._experiment.name if self._experiment else self._name
return self._project

@property
def version(self) -> Optional[str]:
Expand Down
2 changes: 1 addition & 1 deletion tests/tests_pytorch/loggers/test_all.py
Original file line number Diff line number Diff line change
Expand Up @@ -300,7 +300,7 @@ def on_train_batch_start(self, trainer, pl_module, batch, batch_idx):


@pytest.mark.parametrize("logger_class", ALL_LOGGER_CLASSES_WO_NEPTUNE_WANDB)
@RunIf(skip_windows=True, skip_hanging_spawn=True)
@RunIf(skip_windows=True)
def test_logger_created_on_rank_zero_only(tmpdir, monkeypatch, logger_class):
"""Test that loggers get replaced by dummy loggers on global rank > 0."""
_patch_comet_atexit(monkeypatch)
Expand Down
20 changes: 14 additions & 6 deletions tests/tests_pytorch/loggers/test_wandb.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,16 @@
from tests_pytorch.helpers.utils import no_warning_call


@mock.patch("pytorch_lightning.loggers.wandb.Run", new=mock.Mock)
@mock.patch("pytorch_lightning.loggers.wandb.wandb")
def test_wandb_project_name(*_):
    """``WandbLogger.name`` reports the project name.

    With no ``project`` argument the default project "lightning_logs" is
    used; an explicit ``project`` argument is returned as-is.
    """
    logger = WandbLogger()
    assert logger.name == "lightning_logs"

    logger = WandbLogger(project="project")
    assert logger.name == "project"


@mock.patch("pytorch_lightning.loggers.wandb.Run", new=mock.Mock)
@mock.patch("pytorch_lightning.loggers.wandb.wandb")
def test_wandb_logger_init(wandb, monkeypatch):
Expand All @@ -48,7 +58,7 @@ def test_wandb_logger_init(wandb, monkeypatch):
wandb.init.reset_mock()
WandbLogger(project="test_project").experiment
wandb.init.assert_called_once_with(
name="test_project", dir=None, id=None, project="test_project", resume="allow", anonymous=None
name=None, dir=None, id=None, project="test_project", resume="allow", anonymous=None
)

# test wandb.init and setting logger experiment externally
Expand Down Expand Up @@ -91,7 +101,6 @@ def test_wandb_logger_init(wandb, monkeypatch):
logger.watch("model", "log", 10, False)
wandb.init().watch.assert_called_once_with("model", log="log", log_freq=10, log_graph=False)

assert logger.name == wandb.init().name
assert logger.version == wandb.init().id


Expand Down Expand Up @@ -140,10 +149,9 @@ def test_wandb_logger_dirs_creation(wandb, monkeypatch, tmpdir):
"""Test that the logger creates the folders and files in the right place."""
monkeypatch.setattr(pytorch_lightning.loggers.wandb, "_WANDB_GREATER_EQUAL_0_12_10", True)
wandb.run = None
logger = WandbLogger(save_dir=str(tmpdir), offline=True)
logger = WandbLogger(project="project", save_dir=str(tmpdir), offline=True)
# the logger get initialized
assert logger.version == wandb.init().id
assert logger.name == wandb.init().name

# mock return values of experiment
wandb.run = None
Expand All @@ -154,7 +162,7 @@ def test_wandb_logger_dirs_creation(wandb, monkeypatch, tmpdir):
_ = logger.experiment

assert logger.version == "1"
assert logger.name == "run_name"
assert logger.name == "project"
assert str(tmpdir) == logger.save_dir
assert not os.listdir(tmpdir)

Expand All @@ -164,7 +172,7 @@ def test_wandb_logger_dirs_creation(wandb, monkeypatch, tmpdir):
assert trainer.log_dir == logger.save_dir
trainer.fit(model)

assert trainer.checkpoint_callback.dirpath == str(tmpdir / "run_name" / version / "checkpoints")
assert trainer.checkpoint_callback.dirpath == str(tmpdir / "project" / version / "checkpoints")
assert set(os.listdir(trainer.checkpoint_callback.dirpath)) == {"epoch=0-step=3.ckpt"}
assert trainer.log_dir == logger.save_dir

Expand Down