change default logger to dedicated one #1064

Merged · 1 commit · Mar 17, 2020
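What this PR does: every module that previously logged through the standard library's root logger (via `import logging as log`) now imports a single, package-scoped logger created in `pytorch_lightning/__init__.py`. In sketch form, the swap repeated across the diff below looks like this (the `log.info` call is illustrative, not part of the diff):

# Before: module-level calls such as log.info(...) went through
# logging's root logger, shared with the whole application.
import logging as log
log.info("some trainer message")

# After: all modules share one dedicated logger named "lightning",
# so its output can be configured independently.
from pytorch_lightning import _logger as log
log.info("some trainer message")
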
2 changes: 1 addition & 1 deletion pl_examples/basic_examples/lightning_module_template.py
@@ -1,7 +1,6 @@
 """
 Example template for defining a system
 """
-import logging as log
 import os
 from argparse import ArgumentParser
 from collections import OrderedDict
@@ -14,6 +13,7 @@
 from torch.utils.data import DataLoader
 from torchvision.datasets import MNIST
 
+from pytorch_lightning import _logger as log
 from pytorch_lightning.core import LightningModule
3 changes: 3 additions & 0 deletions pytorch_lightning/__init__.py
@@ -24,6 +24,9 @@
     # We are not importing the rest of the scikit during the build
     # process, as it may not be compiled yet
 else:
+    from logging import getLogger
+    _logger = getLogger("lightning")
+
     from .core import LightningModule
     from .trainer import Trainer
     from .callbacks import Callback
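This hunk is the core of the change: a single named logger is created at import time, and every other file in the diff switches over to it. A practical upside, sketched below with standard-library calls only (the level and format choices are illustrative): user code can now tune or silence Lightning's output without touching the root logger or other libraries.

import logging

# logging.getLogger returns the same "lightning" logger object
# that pytorch_lightning/__init__.py creates.
lightning_logger = logging.getLogger("lightning")

# Hide Lightning messages below WARNING, leaving other loggers untouched.
lightning_logger.setLevel(logging.WARNING)

# Or give Lightning's messages their own handler and format.
handler = logging.StreamHandler()
handler.setFormatter(logging.Formatter("[lightning] %(levelname)s: %(message)s"))
lightning_logger.addHandler(handler)
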
2 changes: 1 addition & 1 deletion pytorch_lightning/callbacks/early_stopping.py
@@ -5,12 +5,12 @@
 
 """
 
-import logging as log
 import warnings
 
 import numpy as np
 
 from .base import Callback
+from pytorch_lightning import _logger as log
 
 
 class EarlyStopping(Callback):
2 changes: 1 addition & 1 deletion pytorch_lightning/callbacks/model_checkpoint.py
@@ -5,7 +5,6 @@
 Automatically save model checkpoints during training.
 """
 
-import logging as log
 import os
 import shutil
 import warnings
@@ -14,6 +13,7 @@
 import numpy as np
 
 from pytorch_lightning.callbacks.base import Callback
+from pytorch_lightning import _logger as log
 
 
 class ModelCheckpoint(Callback):
2 changes: 1 addition & 1 deletion pytorch_lightning/core/lightning.py
@@ -1,6 +1,5 @@
 import collections
 import inspect
-import logging as log
 import os
 import warnings
 from abc import ABC, abstractmethod
@@ -15,6 +14,7 @@
 from torch.optim.optimizer import Optimizer
 from torch.utils.data import DataLoader
 
+from pytorch_lightning import _logger as log
 from pytorch_lightning.core.grads import GradInformation
 from pytorch_lightning.core.hooks import ModelHooks
 from pytorch_lightning.core.memory import ModelSummary
3 changes: 2 additions & 1 deletion pytorch_lightning/core/memory.py
@@ -3,7 +3,6 @@
 """
 
 import gc
-import logging as log
 import os
 import subprocess
 from subprocess import PIPE
@@ -15,6 +14,8 @@
 
 import pytorch_lightning as pl
 
+from pytorch_lightning import _logger as log
+
 
 class ModelSummary(object):
 
3 changes: 2 additions & 1 deletion pytorch_lightning/core/saving.py
@@ -1,9 +1,10 @@
 import csv
-import logging as log
 import os
 from argparse import Namespace
 from typing import Union, Dict, Any
 
+from pytorch_lightning import _logger as log
+
 
 class ModelIO(object):
 
4 changes: 2 additions & 2 deletions pytorch_lightning/loggers/comet.py
@@ -6,7 +6,6 @@
 -------------
 """
 
-import logging as log
 from argparse import Namespace
 from typing import Optional, Dict, Union, Any
 
@@ -27,8 +26,9 @@
 import torch
 from torch import is_tensor
 
+from pytorch_lightning import _logger as log
+from pytorch_lightning.loggers.base import LightningLoggerBase, rank_zero_only
 from pytorch_lightning.utilities.debugging import MisconfigurationException
-from .base import LightningLoggerBase, rank_zero_only
 
 
 class CometLogger(LightningLoggerBase):
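Alongside the logger swap, the three third-party integrations (`comet.py` here, `mlflow.py` and `trains.py` below) also trade their relative `.base` import for the absolute `pytorch_lightning.loggers.base` path. Both spellings resolve to the same module; the absolute form is simply the more explicit style PEP 8 recommends, and it matches the other imports in these files. Side by side:

# Relative: resolved against the containing package, pytorch_lightning.loggers.
from .base import LightningLoggerBase, rank_zero_only

# Absolute: the full module path is spelled out.
from pytorch_lightning.loggers.base import LightningLoggerBase, rank_zero_only
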
4 changes: 2 additions & 2 deletions pytorch_lightning/loggers/mlflow.py
@@ -23,7 +23,6 @@ def any_lightning_module_function_or_hook(...):
         self.logger.experiment.whatever_ml_flow_supports(...)
 
 """
-import logging as log
 from argparse import Namespace
 from time import time
 from typing import Optional, Dict, Any, Union
@@ -34,7 +33,8 @@ def any_lightning_module_function_or_hook(...):
     raise ImportError('You want to use `mlflow` logger which is not installed yet,'
                       ' install it with `pip install mlflow`.')
 
-from .base import LightningLoggerBase, rank_zero_only
+from pytorch_lightning import _logger as log
+from pytorch_lightning.loggers.base import LightningLoggerBase, rank_zero_only
 
 
 class MLFlowLogger(LightningLoggerBase):
2 changes: 1 addition & 1 deletion pytorch_lightning/loggers/neptune.py
@@ -6,7 +6,6 @@
 NeptuneLogger
 --------------
 """
-import logging as log
 from argparse import Namespace
 from typing import Optional, List, Dict, Any, Union, Iterable
 
@@ -20,6 +19,7 @@
 import torch
 from torch import is_tensor
 
+from pytorch_lightning import _logger as log
 from pytorch_lightning.loggers.base import LightningLoggerBase, rank_zero_only
 
 
4 changes: 2 additions & 2 deletions pytorch_lightning/loggers/trains.py
@@ -24,7 +24,6 @@ def any_lightning_module_function_or_hook(...):
 
 """
 
-import logging as log
 from argparse import Namespace
 from pathlib import Path
 from typing import Any, Dict, Optional, Union
@@ -38,7 +37,8 @@ def any_lightning_module_function_or_hook(...):
     raise ImportError('You want to use `TRAINS` logger which is not installed yet,'
                       ' install it with `pip install trains`.')
 
-from .base import LightningLoggerBase, rank_zero_only
+from pytorch_lightning import _logger as log
+from pytorch_lightning.loggers.base import LightningLoggerBase, rank_zero_only
 
 
 class TrainsLogger(LightningLoggerBase):
3 changes: 2 additions & 1 deletion pytorch_lightning/profiler/profiler.py
@@ -1,6 +1,5 @@
 import cProfile
 import io
-import logging as log
 import pstats
 import time
 from abc import ABC, abstractmethod
@@ -9,6 +8,8 @@
 
 import numpy as np
 
+from pytorch_lightning import _logger as log
+
 
 class BaseProfiler(ABC):
     """
3 changes: 2 additions & 1 deletion pytorch_lightning/trainer/auto_mix_precision.py
@@ -1,6 +1,7 @@
-import logging as log
 from abc import ABC
 
+from pytorch_lightning import _logger as log
+
 try:
     from apex import amp
 except ImportError:
3 changes: 1 addition & 2 deletions pytorch_lightning/trainer/distrib_data_parallel.py
@@ -113,15 +113,14 @@ def train_fx(trial_hparams, cluster_manager, _):
 
 """
 
-import logging as log
 import os
 import re
 import warnings
 from abc import ABC, abstractmethod
 from typing import Union
 
 import torch
-
+from pytorch_lightning import _logger as log
 from pytorch_lightning.loggers import LightningLoggerBase
 from pytorch_lightning.utilities.debugging import MisconfigurationException
 
2 changes: 1 addition & 1 deletion pytorch_lightning/trainer/distrib_parts.py
@@ -334,12 +334,12 @@
 
 """
 
-import logging as log
 import os
 from abc import ABC, abstractmethod
 
 import torch
 
+from pytorch_lightning import _logger as log
 from pytorch_lightning.overrides.data_parallel import (
     LightningDistributedDataParallel,
     LightningDataParallel,
5 changes: 2 additions & 3 deletions pytorch_lightning/trainer/trainer.py
@@ -1,5 +1,4 @@
 import inspect
-import logging as log
 import os
 import sys
 import warnings
@@ -14,8 +13,8 @@
 from torch.utils.data import DataLoader
 from tqdm.auto import tqdm
 
-from pytorch_lightning.callbacks import Callback
-from pytorch_lightning.callbacks import ModelCheckpoint, EarlyStopping
+from pytorch_lightning import _logger as log
+from pytorch_lightning.callbacks import ModelCheckpoint, EarlyStopping, Callback
 from pytorch_lightning.loggers import LightningLoggerBase
 from pytorch_lightning.profiler import Profiler, PassThroughProfiler
 from pytorch_lightning.profiler.profiler import BaseProfiler
2 changes: 1 addition & 1 deletion pytorch_lightning/trainer/training_io.py
@@ -89,7 +89,6 @@
 
 """
 
-import logging as log
 import os
 import re
 import signal
@@ -102,6 +101,7 @@
 import torch
 import torch.distributed as torch_distrib
 
+from pytorch_lightning import _logger as log
 from pytorch_lightning.core.lightning import LightningModule
 from pytorch_lightning.loggers import LightningLoggerBase
 from pytorch_lightning.overrides.data_parallel import (
2 changes: 1 addition & 1 deletion pytorch_lightning/trainer/training_loop.py
@@ -122,7 +122,6 @@ def training_step(self, batch, batch_idx):
 """
 
 import copy
-import logging as log
 import warnings
 from abc import ABC, abstractmethod
 from typing import Callable
@@ -131,6 +130,7 @@ def training_step(self, batch, batch_idx):
 import numpy as np
 from torch.utils.data import DataLoader
 
+from pytorch_lightning import _logger as log
 from pytorch_lightning.callbacks.base import Callback
 from pytorch_lightning.core.lightning import LightningModule
 from pytorch_lightning.loggers import LightningLoggerBase
2 changes: 1 addition & 1 deletion pytorch_lightning/trainer/training_tricks.py
@@ -1,9 +1,9 @@
-import logging as log
 import math
 from abc import ABC, abstractmethod
 
 import torch
 
+from pytorch_lightning import _logger as log
 from pytorch_lightning.callbacks import GradientAccumulationScheduler
 
 EPSILON = 1e-6