
Commit a4f09ad

Alexander Snorkin authored and committed
Merge branch 'feature/5275_clean_progress_bar_print' of https://github.com/asnorkin/pytorch-lightning into feature/5275_clean_progress_bar_print
2 parents ba65102 + d560388 commit a4f09ad

87 files changed: 1423 additions, 480 deletions


CHANGELOG.md

Lines changed: 4 additions & 0 deletions
@@ -54,6 +54,10 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
 - Added missing val/test hooks in `LightningModule` ([#5467](https://github.com/PyTorchLightning/pytorch-lightning/pull/5467))


+- `Recall` and `Precision` metrics (and their functional counterparts `recall` and `precision`) can now be generalized to Recall@K and Precision@K with the use of `top_k` parameter ([#4842](https://github.com/PyTorchLightning/pytorch-lightning/pull/4842))
+
+
+
 ### Changed

 - Changed `stat_scores` metric now calculates stat scores over all classes and gains new parameters, in line with the new `StatScores` metric ([#4839](https://github.com/PyTorchLightning/pytorch-lightning/pull/4839))
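
For context on the top_k entry above, here is a minimal usage sketch (not part of this commit); the default averaging behaviour and the exact keyword handling of the functional metrics are assumptions based on the changelog wording:

    import torch
    from pytorch_lightning.metrics.functional import precision, recall

    # Class probabilities for 4 samples over 3 classes
    preds = torch.tensor([[0.7, 0.2, 0.1],
                          [0.1, 0.8, 0.1],
                          [0.2, 0.3, 0.5],
                          [0.3, 0.4, 0.3]])
    target = torch.tensor([0, 1, 0, 2])

    # Precision@2 / Recall@2: a sample counts as a hit if its target class
    # is among the 2 highest-scoring predictions
    p_at_2 = precision(preds, target, top_k=2)
    r_at_2 = recall(preds, target, top_k=2)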

docs/source/metrics.rst

Lines changed: 5 additions & 5 deletions
@@ -382,8 +382,8 @@ the possible class labels are 0, 1, 2, 3, etc. Below are some examples of differ
     ml_target = torch.tensor([[0, 1, 1], [1, 0, 0], [0, 0, 0]])


-Using the ``is_multiclass`` parameter
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+Using the is_multiclass parameter
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

 In some cases, you might have inputs which appear to be (multi-dimensional) multi-class
 but are actually binary/multi-label - for example, if both predictions and targets are
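
As background for the retitled is_multiclass section above, a hypothetical sketch of the situation it covers; the exact metric and keyword placement (stat_scores, num_classes, is_multiclass) are assumptions based on this docs section, not something shown in the diff:

    import torch
    from pytorch_lightning.metrics.functional import stat_scores

    # Integer predictions/targets that look like 2-class multi-class data
    # but are really binary: only the labels 0 and 1 ever occur
    preds = torch.tensor([0, 1, 1, 0])
    target = torch.tensor([1, 1, 0, 0])

    # Hypothetical call: treat the inputs as binary rather than multi-class
    scores = stat_scores(preds, target, num_classes=1, is_multiclass=False)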
@@ -602,14 +602,14 @@ roc [func]
 precision [func]
 ~~~~~~~~~~~~~~~~

-.. autofunction:: pytorch_lightning.metrics.functional.classification.precision
+.. autofunction:: pytorch_lightning.metrics.functional.precision
     :noindex:


 precision_recall [func]
 ~~~~~~~~~~~~~~~~~~~~~~~

-.. autofunction:: pytorch_lightning.metrics.functional.classification.precision_recall
+.. autofunction:: pytorch_lightning.metrics.functional.precision_recall
     :noindex:


@@ -623,7 +623,7 @@ precision_recall_curve [func]
 recall [func]
 ~~~~~~~~~~~~~

-.. autofunction:: pytorch_lightning.metrics.functional.classification.recall
+.. autofunction:: pytorch_lightning.metrics.functional.recall
     :noindex:

 select_topk [func]
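
The three autofunction changes above point the docs at the top-level functional namespace; a minimal sketch of the new import path (argument handling beyond preds and target is assumed):

    import torch
    from pytorch_lightning.metrics.functional import precision, precision_recall, recall

    preds = torch.tensor([0, 2, 1, 1])
    target = torch.tensor([0, 1, 1, 2])

    precision(preds, target)         # previously documented under metrics.functional.classification
    recall(preds, target)            # same move: classification.recall -> functional.recall
    precision_recall(preds, target)  # returns the precision/recall pair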

pyproject.toml

Lines changed: 0 additions & 32 deletions
@@ -23,18 +23,6 @@ known_first_party = [
     "tests",
 ]
 skip_glob = [
-    # todo
-    "pytorch_lightning/accelerators/*",
-
-
-    # todo
-    "pytorch_lightning/callbacks/*",
-
-
-    # todo
-    "pytorch_lightning/cluster_environments/*",
-
-
     # todo
     "pytorch_lightning/core/*",

@@ -43,28 +31,8 @@ skip_glob = [
     "pytorch_lightning/distributed/*",


-    # todo
-    "pytorch_lightning/metrics/*",
-
-
-    # todo
-    "pytorch_lightning/overrides/*",
-
-
     # todo
     "pytorch_lightning/plugins/*",
-
-
-    # todo
-    "pytorch_lightning/profiler/*",
-
-
-    # todo
-    "pytorch_lightning/trainer/*",
-
-
-    # todo
-    "pytorch_lightning/tuner/*",
 ]
 profile = "black"
 line_length = 120

pytorch_lightning/accelerators/__init__.py

Lines changed: 5 additions & 5 deletions
@@ -11,15 +11,15 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+from pytorch_lightning.accelerators.accelerator import Accelerator  # noqa: F401
 from pytorch_lightning.accelerators.cpu_accelerator import CPUAccelerator  # noqa: F401
 from pytorch_lightning.accelerators.ddp2_accelerator import DDP2Accelerator  # noqa: F401
 from pytorch_lightning.accelerators.ddp_accelerator import DDPAccelerator  # noqa: F401
-from pytorch_lightning.accelerators.ddp_spawn_accelerator import DDPSpawnAccelerator  # noqa: F401
+from pytorch_lightning.accelerators.ddp_cpu_hpc_accelerator import DDPCPUHPCAccelerator  # noqa: F401
 from pytorch_lightning.accelerators.ddp_cpu_spawn_accelerator import DDPCPUSpawnAccelerator  # noqa: F401
+from pytorch_lightning.accelerators.ddp_hpc_accelerator import DDPHPCAccelerator  # noqa: F401
+from pytorch_lightning.accelerators.ddp_spawn_accelerator import DDPSpawnAccelerator  # noqa: F401
 from pytorch_lightning.accelerators.dp_accelerator import DataParallelAccelerator  # noqa: F401
 from pytorch_lightning.accelerators.gpu_accelerator import GPUAccelerator  # noqa: F401
-from pytorch_lightning.accelerators.tpu_accelerator import TPUAccelerator  # noqa: F401
 from pytorch_lightning.accelerators.horovod_accelerator import HorovodAccelerator  # noqa: F401
-from pytorch_lightning.accelerators.ddp_hpc_accelerator import DDPHPCAccelerator  # noqa: F401
-from pytorch_lightning.accelerators.ddp_cpu_hpc_accelerator import DDPCPUHPCAccelerator  # noqa: F401
-from pytorch_lightning.accelerators.accelerator import Accelerator  # noqa: F401
+from pytorch_lightning.accelerators.tpu_accelerator import TPUAccelerator  # noqa: F401

pytorch_lightning/accelerators/accelerator_connector.py

Lines changed: 8 additions & 2 deletions
@@ -15,13 +15,19 @@

 import torch

-from pytorch_lightning.utilities import _HOROVOD_AVAILABLE, DeviceType, DistributedType
 from pytorch_lightning import _logger as log
 from pytorch_lightning import accelerators
 from pytorch_lightning.accelerators.accelerator import Accelerator
 from pytorch_lightning.cluster_environments.slurm_environment import SLURMEnvironment
 from pytorch_lightning.cluster_environments.torchelastic_environment import TorchElasticEnvironment
-from pytorch_lightning.utilities import device_parser, rank_zero_only, _TPU_AVAILABLE
+from pytorch_lightning.utilities import (
+    _HOROVOD_AVAILABLE,
+    _TPU_AVAILABLE,
+    device_parser,
+    DeviceType,
+    DistributedType,
+    rank_zero_only,
+)
 from pytorch_lightning.utilities.distributed import rank_zero_info, rank_zero_warn
 from pytorch_lightning.utilities.exceptions import MisconfigurationException

pytorch_lightning/accelerators/cpu_accelerator.py

Lines changed: 1 addition & 1 deletion
@@ -11,7 +11,7 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-from typing import Any, Optional, Union, Callable
+from typing import Any, Callable, Optional, Union

 import torch

pytorch_lightning/accelerators/horovod_accelerator.py

Lines changed: 1 addition & 1 deletion
@@ -12,7 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 from contextlib import ExitStack
-from typing import Any, Optional, Union, Callable
+from typing import Any, Callable, Optional, Union

 import torch
 from torch.optim.lr_scheduler import _LRScheduler

pytorch_lightning/callbacks/gpu_stats_monitor.py

Lines changed: 1 addition & 1 deletion
@@ -27,7 +27,7 @@
 from typing import Dict, List, Tuple

 from pytorch_lightning.callbacks.base import Callback
-from pytorch_lightning.utilities import rank_zero_only, DeviceType
+from pytorch_lightning.utilities import DeviceType, rank_zero_only
 from pytorch_lightning.utilities.exceptions import MisconfigurationException
 from pytorch_lightning.utilities.parsing import AttributeDict

pytorch_lightning/cluster_environments/slurm_environment.py

Lines changed: 1 addition & 0 deletions
@@ -14,6 +14,7 @@

 import os
 import re
+
 from pytorch_lightning import _logger as log
 from pytorch_lightning.cluster_environments.cluster_environment import ClusterEnvironment

pytorch_lightning/cluster_environments/torchelastic_environment.py

Lines changed: 2 additions & 1 deletion
@@ -13,9 +13,10 @@
 # limitations under the License.

 import os
+
 from pytorch_lightning import _logger as log
-from pytorch_lightning.utilities import rank_zero_warn
 from pytorch_lightning.cluster_environments.cluster_environment import ClusterEnvironment
+from pytorch_lightning.utilities import rank_zero_warn


 class TorchElasticEnvironment(ClusterEnvironment):