Merge pull request #73 from DIVA-DIA/dev
release version 0.1.1
powl7 authored Oct 22, 2021
2 parents df4c28b + 2c4e807 commit f926959
Showing 124 changed files with 808 additions and 361 deletions.
13 changes: 13 additions & 0 deletions .github/workflows/ci-testing.yml
@@ -69,10 +69,23 @@ jobs:
         if: success()
         run: |
           coverage html
+          coverage xml
       - name: Code coverage results upload
         if: success()
         uses: actions/upload-artifact@v2
         with:
           name: code-coverage-report_${{ runner.os }}_py${{ matrix.python-version }}
           path: htmlcov/
+
+      - name: Upload coverage to Codecov
+        uses: codecov/codecov-action@v1
+        if: always()
+        # see: https://github.com/actions/toolkit/issues/399
+        continue-on-error: true
+        with:
+          token: ${{ secrets.CODECOV_TOKEN }}
+          file: coverage.xml
+          flags: cpu,pytest
+          name: Base-coverage
+          fail_ci_if_error: false
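
Note on the new steps: `coverage xml` produces the coverage.xml file that codecov-action@v1 uploads, while `continue-on-error` together with `fail_ci_if_error: false` keeps a Codecov outage from failing the build (see the linked toolkit issue). A minimal sketch of the equivalent reports via coverage.py's Python API, using the same filenames as this workflow:

    import coverage

    cov = coverage.Coverage()
    cov.start()
    # ... run the test suite ...
    cov.stop()
    cov.html_report(directory="htmlcov")    # the artifact uploaded by upload-artifact
    cov.xml_report(outfile="coverage.xml")  # the report uploaded to Codecov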
3 changes: 3 additions & 0 deletions conda_env_gpu.yaml
@@ -11,5 +11,8 @@ dependencies:
   - pytorch=1.8.1
   - torchvision=0.9.1
   - pandas=1.3.2
+  - numpy=1.19.2
+  - pillow=7.2.0
+  - tqdm=4.60.0
   - pip:
     - -r requirements.txt
2 changes: 0 additions & 2 deletions configs/callbacks/conf_mat_test_wandb.yaml

This file was deleted.

2 changes: 0 additions & 2 deletions configs/callbacks/conf_mat_val_wandb.yaml

This file was deleted.

2 changes: 0 additions & 2 deletions configs/callbacks/f1_precision_recall_heatmap_wandb.yaml

This file was deleted.

2 changes: 1 addition & 1 deletion configs/datamodule/cb55_10_cropped_datamodule.yaml
@@ -6,4 +6,4 @@ crop_size: 256
 num_workers: 4
 batch_size: 16
 shuffle: True
-drop_last_batch: True
+drop_last: True
2 changes: 1 addition & 1 deletion configs/datamodule/cb55_cropped_datamodule.yaml
@@ -5,4 +5,4 @@ crop_size: 256
 num_workers: 4
 batch_size: 16
 shuffle: True
-drop_last_batch: True
+drop_last: True
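
The `drop_last_batch` → `drop_last` rename lines the datamodule configs up with the argument name of `torch.utils.data.DataLoader` itself. A minimal sketch (hypothetical tensor dataset, sizes taken from these configs) of what the flag does:

    import torch
    from torch.utils.data import DataLoader, TensorDataset

    dataset = TensorDataset(torch.randn(100, 3, 256, 256))  # 100 random crops
    loader = DataLoader(dataset, batch_size=16, shuffle=True,
                        num_workers=4, drop_last=True)
    # 100 samples at batch_size=16 -> 6 full batches; the trailing batch of 4 is dropped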
36 changes: 27 additions & 9 deletions configs/experiment/cb55_full_run_unet.yaml
@@ -4,18 +4,27 @@
 # python run.py +experiment=exp_example_full

 defaults:
-  - override /loss: crossentropyloss.yaml
-  - override /trainer: null # override trainer to null so it's not loaded from main config defaults...
-  - override /task: semantic_segmentation_task.yaml
-  - override /model/backbone: unet_model.yaml
-  - override /model/header: identity.yaml
-  - override /datamodule: null
-  - override /metric: hisdbiou.yaml
+  - /plugins: default.yaml
+  - /task: semantic_segmentation_task.yaml
+  - /loss: crossentropyloss.yaml
+  - /metric: hisdbiou.yaml
+  - /model/backbone: unet_model.yaml
+  - /model/header: identity.yaml
+  - /optimizer: adam.yaml
+  - /callbacks:
+      - check_compatibility.yaml
+      - model_checkpoint.yaml
+  - /logger:
+      - wandb.yaml # set logger here or use command line (e.g. `python run.py logger=wandb`)
+      - csv.yaml

+# we override default configurations with nulls to prevent them from loading at all
+# instead we define all modules and their paths directly in this config,
+# so everything is stored in one place for more readability

 train: True
 test: True

 trainer:
   _target_: pytorch_lightning.Trainer
   gpus: -1
@@ -24,6 +33,11 @@ trainer:
   max_epochs: 50
   precision: 16

+task:
+  confusion_matrix_log_every_n_epoch: 10
+  confusion_matrix_val: True
+  confusion_matrix_test: True
+
 datamodule:
   _target_: src.datamodules.hisDBDataModule.DIVAHisDBDataModule.DIVAHisDBDataModuleCropped
@@ -32,14 +46,18 @@ datamodule:
   num_workers: 4
   batch_size: 16
   shuffle: True
-  drop_last_batch: True
+  drop_last: True

 callbacks:
   model_checkpoint:
     monitor: "val/hisdbiou"
     mode: "max"
     filename: ${checkpoint_folder_name}cb55-full-unet
+#  watch_model:
+#    log_freq: 1000
+
 logger:
   wandb:
     name: 'cb55-full-unet'
-    tags: ["best_model", "USL"]
+    tags: ["best_model", "USL", "baseline"]
+    group: 'baseline'
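
The defaults list now pulls in plugins, task, loss, metric, model, optimizer, callbacks and loggers directly instead of overriding entries inherited from the main config. A minimal sketch (assuming the repo's `configs/` directory and a root config named `config.yaml`, which are not shown in this diff) of how Hydra composes this experiment:

    from hydra import initialize, compose

    with initialize(config_path="configs"):
        cfg = compose(config_name="config",
                      overrides=["+experiment=cb55_full_run_unet"])
        print(cfg.datamodule.batch_size)  # 16, from the datamodule block above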
11 changes: 6 additions & 5 deletions configs/experiment/development_baby_unet_cb55_10.yaml
@@ -14,11 +14,7 @@ defaults:
   - /callbacks:
       - check_compatibility.yaml
       - model_checkpoint.yaml
-      - conf_mat_val_wandb.yaml
-      - conf_mat_test_wandb.yaml
       - watch_model_wandb.yaml
-      - f1_precision_recall_heatmap_wandb.yaml
-      - upload_checkpoint_wandb.yaml
   - /logger:
       - wandb.yaml # set logger here or use command line (e.g. `python run.py logger=wandb`)
       - csv.yaml
@@ -41,6 +37,11 @@ trainer:
   weights_summary: full
   precision: 16

+task:
+  confusion_matrix_log_every_n_epoch: 1
+  confusion_matrix_val: True
+  confusion_matrix_test: True
+
 datamodule:
   _target_: src.datamodules.hisDBDataModule.DIVAHisDBDataModule.DIVAHisDBDataModuleCropped
@@ -49,7 +50,7 @@ datamodule:
   num_workers: 4
   batch_size: 16
   shuffle: True
-  drop_last_batch: True
+  drop_last: True

 callbacks:
   model_checkpoint:
9 changes: 9 additions & 0 deletions configs/optimizer/adagrad.yaml
@@ -0,0 +1,9 @@
+# explanation of the different parameters: https://pytorch.org/docs/stable/generated/torch.optim.Adagrad.html#torch.optim.Adagrad
+
+_target_: torch.optim.Adagrad
+
+lr: 1e-3
+lr_decay: 0
+weight_decay: 0
+initial_accumulator_value: 0
+eps: 1e-10
10 changes: 10 additions & 0 deletions configs/optimizer/adamax.yaml
@@ -0,0 +1,10 @@
+# explanation of the different parameters: https://pytorch.org/docs/stable/generated/torch.optim.Adamax.html#torch.optim.Adamax
+
+_target_: torch.optim.Adamax
+
+lr: 1e-3
+betas:
+  - 0.9
+  - 0.999
+eps: 1e-8
+weight_decay: 0
11 changes: 11 additions & 0 deletions configs/optimizer/adamw.yaml
@@ -0,0 +1,11 @@
+# explanation of the different parameters: https://pytorch.org/docs/stable/generated/torch.optim.AdamW.html#torch.optim.AdamW
+
+_target_: torch.optim.AdamW
+
+lr: 1e-3
+betas:
+  - 0.9
+  - 0.999
+eps: 1e-8
+weight_decay: 0.01
+amsgrad: False
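
All three new optimizer configs follow the same `_target_` pattern as the existing adam.yaml, so they should be swappable via the optimizer config group (e.g. `optimizer=adamw`). A minimal sketch (toy model, not from this repo) of how such a config is typically consumed with `hydra.utils.instantiate`:

    import hydra.utils
    import torch
    from omegaconf import OmegaConf

    cfg = OmegaConf.load("configs/optimizer/adamw.yaml")
    model = torch.nn.Linear(4, 2)  # stand-in for the real backbone
    optimizer = hydra.utils.instantiate(cfg, params=model.parameters())
    print(type(optimizer).__name__)  # AdamW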
3 changes: 2 additions & 1 deletion pytest.ini
@@ -1,2 +1,3 @@
 [pytest]
-python_files=tests/*.py
+python_files=tests/*.py
+filterwarnings = ignore::UserWarning
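
The new `filterwarnings` line silences `UserWarning`s for the whole test run; other categories (e.g. `DeprecationWarning`) are unaffected. A tiny sketch of a hypothetical test that now passes quietly:

    import warnings

    def test_user_warning_is_ignored():
        warnings.warn("cropped dataset is small", UserWarning)  # filtered by pytest.ini
        assert True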
1 change: 0 additions & 1 deletion requirements.txt
@@ -3,7 +3,6 @@ torch==1.8.1
 torchvision>=0.9.1
 pytorch-lightning>=1.4.4
 lightning-bolts>=0.3.2
-gym==0.18.0
 torchmetrics>=0.5.0

 # --------- hydra --------- #
1 change: 1 addition & 0 deletions src/callbacks/model_callbacks.py
@@ -65,6 +65,7 @@ class CheckBackboneHeaderCompatibility(Callback):
     def __init__(self):
         self.checked = False

+    @rank_zero_only
     def setup(self, trainer: "pl.Trainer", pl_module: "pl.LightningModule", stage: Optional[str] = None) -> None:
         if self.checked:
             return
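
`@rank_zero_only` comes from `pytorch_lightning.utilities`; in a multi-GPU (DDP) run it restricts the wrapped method to the rank-0 process, so the backbone/header compatibility check runs once instead of once per worker. A minimal sketch of the decorator's effect outside this class (hypothetical function and message):

    from pytorch_lightning.utilities import rank_zero_only

    @rank_zero_only
    def report(msg: str) -> None:
        print(msg)  # executes only on global rank 0 in distributed runs

    report("backbone and header are compatible")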