Aadi/track local queues #20

Merged · 41 commits · Apr 22, 2024

Commits (the diff below shows changes from 17 commits)
e4386d8
fix issue tracking across gaps of no instances
Nov 2, 2023
91d8b41
add notebooks folder to gitignore
Nov 2, 2023
51200a0
add `notebooks` folder to gitignore (temp)
Nov 9, 2023
b37d199
Add `Frame` and `Instance` objects for data manipulation
Nov 9, 2023
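The PR's actual definitions aren't shown here, but container classes like these typically look roughly as follows; only the class names come from the commit, and every field below is an assumption for illustration:

```python
# Hypothetical sketch of the Frame/Instance containers added in b37d199.
# Only the class names come from the commit; all fields are assumptions.
from dataclasses import dataclass, field

import torch


@dataclass
class Instance:
    """One detection: a bounding box plus ground-truth and predicted track ids."""

    bbox: torch.Tensor        # (4,) tensor: x1, y1, x2, y2 (assumed layout)
    gt_track_id: int = -1     # ground-truth track label, -1 if unknown
    pred_track_id: int = -1   # id assigned by the tracker at inference


@dataclass
class Frame:
    """All detections belonging to a single video frame."""

    video_id: int
    frame_id: int
    instances: list[Instance] = field(default_factory=list)

    def has_instances(self) -> bool:
        # Later commits handle the "no detections" edge case, so an
        # emptiness check like this is likely part of the design.
        return len(self.instances) > 0
```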
a63dbf5
implement track_local_queues
Nov 10, 2023
02051d3
implement track_local_queues for tracking inference
Nov 10, 2023
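The branch name and these two commits suggest the tracker keeps a bounded queue of recent instances per track and matches new detections against those queues. The real `track_local_queues` logic isn't reproduced here; the sketch below only illustrates the general idea, and every name other than the notion of a per-track queue is made up:

```python
# Illustrative only: bounded per-track queues of recent instances.
# The PR's actual track_local_queues implementation may differ substantially.
from collections import deque
from typing import Any


class TrackQueues:
    def __init__(self, window_size: int = 8, max_tracks: int = 100):
        self.window_size = window_size   # retention limit per track
        self.max_tracks = max_tracks     # cap on concurrent tracks
        self.queues: dict[int, deque] = {}

    def add(self, track_id: int, instance: Any) -> None:
        if track_id not in self.queues:
            if len(self.queues) >= self.max_tracks:
                raise RuntimeError("exceeded max_tracks")
            self.queues[track_id] = deque(maxlen=self.window_size)
        # deque(maxlen=...) silently drops the oldest entry when full,
        # so each track retains only its most recent instances.
        self.queues[track_id].append(instance)

    def candidates(self) -> dict[int, list]:
        """Recent instances per track, used as matching candidates."""
        return {tid: list(q) for tid, q in self.queues.items()}
```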
d19bdf4
add tests for data_structures
Nov 10, 2023
25dd3bf
fix formatting + doc strings
Nov 10, 2023
8a2801b
fix typos
Nov 10, 2023
055d6e1
get training to work with Frames and Instances objects
Nov 10, 2023
6f83300
update test yaml
Nov 10, 2023
908f679
lint test_models
Nov 10, 2023
2d31a21
fix doc strings
Dec 4, 2023
ba4913d
fix typo, use gt track from predicted dataset not pred track
Dec 4, 2023
c0f9bcd
fix edge case leading to error when no instances are detected
Dec 4, 2023
a03cca9
fix small typos + docstrings
Dec 4, 2023
7c4c645
fix docstrings + small typo
Dec 4, 2023
db5d83a
create checkpoint dir manually, use better checkpoint names
aaprasad Feb 6, 2024
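In PyTorch Lightning this is usually handled with a `ModelCheckpoint` callback; a sketch of the pattern (the directory, filename template, and monitored metric are placeholders, not the PR's actual values):

```python
# Sketch of manual checkpoint-dir creation with descriptive filenames.
# Path, filename template, and monitored metric are assumed, not from the PR.
import os

from pytorch_lightning.callbacks import ModelCheckpoint

ckpt_dir = "./models/example-run"      # hypothetical location
os.makedirs(ckpt_dir, exist_ok=True)   # create the directory manually

checkpoint_callback = ModelCheckpoint(
    dirpath=ckpt_dir,
    filename="{epoch}-{val_loss:.3f}",  # readable names instead of defaults
    monitor="val_loss",
    save_top_k=3,
)
```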
2fcb057
add poses and easy conversion to sleap label objects
aaprasad Feb 6, 2024
5975bc1
add flexibility to visualization such as control over sizes as well a…
aaprasad Feb 6, 2024
6589d45
add random seed for reproducibility especially for chunk subsampling
aaprasad Feb 6, 2024
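Seeding every random number generator in play is the standard way to make operations like chunk subsampling reproducible; a typical pattern (not necessarily this commit's exact code):

```python
# Common seeding pattern for reproducible sampling and training.
import random

import numpy as np
import torch


def set_seed(seed: int = 42) -> None:
    random.seed(seed)          # Python's builtin RNG
    np.random.seed(seed)       # NumPy, e.g. for chunk subsampling
    torch.manual_seed(seed)    # PyTorch CPU and CUDA generators
```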
305b0bf
extend CTC dataset to multiple videos. fix small for loop variable du…
aaprasad Feb 6, 2024
0b647fa
handle missing detections case
aaprasad Feb 6, 2024
7b59bbc
use correct bounds for instance retention limits. Add max tracks
aaprasad Feb 6, 2024
dfd3691
fix gpu memory leak
aaprasad Feb 6, 2024
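A frequent cause of GPU memory "leaks" in training loops is accumulating loss or metric tensors that still reference their autograd graphs, which keeps every intermediate activation alive. Whether or not that was the bug here, the usual fix looks like:

```python
# Detach and move tensors to CPU before storing them across steps;
# otherwise each stored loss pins its whole computation graph on the GPU.
import torch

step_losses: list[torch.Tensor] = []


def accumulate(loss: torch.Tensor) -> None:
    step_losses.append(loss.detach().cpu())
```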
8d9ed9d
move train metrics to its own section in config
aaprasad Apr 19, 2024
a46290d
add ability to store and visualize track scores
aaprasad Apr 19, 2024
e7a22f7
don't subsample chunks with replacement
aaprasad Apr 19, 2024
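Sampling with `replace=False` guarantees no chunk is drawn twice in one pass; for example with NumPy (an assumption about how the subsampling is done, not the literal diff):

```python
# Subsample chunk indices without replacement: each index at most once.
import numpy as np

rng = np.random.default_rng(seed=42)
n_chunks, n_sample = 1000, 100
idx = rng.choice(n_chunks, size=n_sample, replace=False)
```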
1d5e1b7
add debug statements to tracker
aaprasad Apr 19, 2024
67d879e
only clear gpu after each epoch instead of each step
aaprasad Apr 19, 2024
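Calling `torch.cuda.empty_cache()` on every step adds allocator overhead and defeats PyTorch's caching; moving it to an epoch-end hook keeps the cache useful during training. In Lightning that looks roughly like:

```python
# Clear the CUDA cache once per epoch instead of after every step.
import torch
from pytorch_lightning import LightningModule


class Runner(LightningModule):  # simplified stand-in for GTRRunner
    def on_train_epoch_end(self) -> None:
        # Releases cached, currently-unused CUDA memory blocks.
        torch.cuda.empty_cache()
```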
5e53341
lint
aaprasad Apr 19, 2024
e37fc35
lint + fix docstrings
aaprasad Apr 19, 2024
4c61fbe
fix typing in cell tracking dataset
aaprasad Apr 19, 2024
52d9ae8
lint
aaprasad Apr 19, 2024
8a560c8
add opencv-headless to environment.yml
aaprasad Apr 19, 2024
96048aa
use conda-forge opencv instead of opencv-headless
aaprasad Apr 19, 2024
d7d2d09
commit suggestions from @coderabbit's reviews
aaprasad Apr 19, 2024
2eef70f
add @coderabbit's suggestions
aaprasad Apr 22, 2024
c351008
lint
aaprasad Apr 22, 2024
4fed899
use batch_size when logging metrics to prevent lightning "can't infer …
aaprasad Apr 22, 2024
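Lightning tries to infer the batch size from the batch object when averaging logged metrics, and warns when it can't (e.g. with custom `Frame` objects instead of tensors). Passing `batch_size` explicitly resolves it; the method below is a sketch, with `compute_loss` as a hypothetical helper:

```python
# Pass batch_size explicitly so Lightning can weight metric averages
# without trying (and failing) to infer it from a custom batch object.
from pytorch_lightning import LightningModule


class Runner(LightningModule):  # simplified stand-in for GTRRunner
    def training_step(self, batch, batch_idx):
        frames = batch[0]                  # list of custom Frame objects
        loss = self.compute_loss(frames)   # hypothetical loss helper
        self.log("train_loss", loss, batch_size=len(frames))
        return loss
```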
33b97ad
Merge branch 'aadi/to_slp' into aadi/track-local-queues
aaprasad Apr 22, 2024
1 change: 1 addition & 0 deletions .gitignore

@@ -76,6 +76,7 @@ target/

# Jupyter Notebook
.ipynb_checkpoints
+notebooks/

# IPython
profile_default/
31 changes: 16 additions & 15 deletions biogtr/config.py

@@ -43,7 +43,7 @@
        return f"Config({self.cfg})"

    def __str__(self):
-        """String representation of config class."""
+        """Return a string representation of config class."""
        return f"Config({self.cfg})"

    def set_hparams(self, hparams: dict) -> bool:
@@ -92,20 +92,21 @@

    def get_gtr_runner(self):
        """Get lightning module for training, validation, and inference."""

        tracker_params = self.cfg.tracker
        optimizer_params = self.cfg.optimizer
        scheduler_params = self.cfg.scheduler
        loss_params = self.cfg.loss
        gtr_runner_params = self.cfg.runner

        if self.cfg.model.ckpt_path is not None and self.cfg.model.ckpt_path != "":
-            model = GTRRunner.load_from_checkpoint(self.cfg.model.ckpt_path,
-                                                   tracker_cfg = tracker_params,
-                                                   train_metrics=self.cfg.runner.train_metrics,
-                                                   val_metrics=self.cfg.runner.val_metrics,
-                                                   test_metrics=self.cfg.runner.test_metrics)
+            model = GTRRunner.load_from_checkpoint(
+                self.cfg.model.ckpt_path,
+                tracker_cfg=tracker_params,
+                train_metrics=self.cfg.runner.train_metrics,
+                val_metrics=self.cfg.runner.val_metrics,
+                test_metrics=self.cfg.runner.test_metrics,
+            )
(Codecov: added line biogtr/config.py#L102 was not covered by tests)

        else:
            model_params = self.cfg.model
            model = GTRRunner(
@@ -186,13 +187,13 @@
            torch.multiprocessing.set_sharing_strategy("file_system")
        else:
            pin_memory = False

        return torch.utils.data.DataLoader(
            dataset=dataset,
            batch_size=1,
            pin_memory=pin_memory,
            collate_fn=dataset.no_batching_fn,
-            **dataloader_params
+            **dataloader_params,
        )

    def get_optimizer(self, params: Iterable) -> torch.optim.Optimizer:
@@ -282,7 +283,7 @@
        callbacks: list[pl.callbacks.Callback],
        logger: pl.loggers.WandbLogger,
        devices: int = 1,
-        accelerator: str = None
+        accelerator: str = None,
    ) -> pl.Trainer:
        """Getter for the lightning trainer.

@@ -297,12 +298,12 @@
            A lightning Trainer with specified params
        """
        if "accelerator" not in self.cfg.trainer:
-            self.set_hparams({'trainer.accelerator': accelerator})
+            self.set_hparams({"trainer.accelerator": accelerator})
(Codecov: added line biogtr/config.py#L301 was not covered by tests)

        if "devices" not in self.cfg.trainer:
-            self.set_hparams({'trainer.devices': devices})
+            self.set_hparams({"trainer.devices": devices})
(Codecov: added line biogtr/config.py#L303 was not covered by tests)

        trainer_params = self.cfg.trainer

        return pl.Trainer(
            callbacks=callbacks,
            logger=logger,