diff --git a/utils/loggers/__init__.py b/utils/loggers/__init__.py
index 8af5c402d5ee..7679ee70f176 100644
--- a/utils/loggers/__init__.py
+++ b/utils/loggers/__init__.py
@@ -47,6 +47,7 @@ def __init__(self, save_dir=None, weights=None, opt=None, hyp=None, logger=None,
                      'metrics/precision', 'metrics/recall', 'metrics/mAP_0.5', 'metrics/mAP_0.5:0.95',  # metrics
                      'val/box_loss', 'val/obj_loss', 'val/cls_loss',  # val loss
                      'x/lr0', 'x/lr1', 'x/lr2']  # params
+        self.best_keys = ['best/epoch', 'best/precision', 'best/recall', 'best/mAP_0.5', 'best/mAP_0.5:0.95']
         for k in LOGGERS:
             setattr(self, k, None)  # init empty logger dictionary
         self.csv = True  # always log to csv
@@ -125,6 +126,10 @@ def on_fit_epoch_end(self, vals, epoch, best_fitness, fi):
                 self.tb.add_scalar(k, v, epoch)
 
         if self.wandb:
+            if best_fitness == fi:
+                best_results = [epoch] + vals[3:7]
+                for i, name in enumerate(self.best_keys):
+                    self.wandb.wandb_run.summary[name] = best_results[i]  # log best results in the summary
             self.wandb.log(x)
             self.wandb.end_epoch(best_result=best_fitness == fi)
 
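For context (not part of the diff above): W&B's run summary defaults to the last value logged for each key, and explicitly assigning to `run.summary` overrides that default, which is how this change surfaces the best epoch's metrics on the run page. Below is a minimal, self-contained sketch of that mechanism; the project name, `demo_vals` data, and the inlined fitness weighting are illustrative assumptions, not code from this repository.

```python
# Illustrative sketch only (not this repo's code): wandb.log() keeps the per-epoch
# history, while assigning to run.summary pins the best epoch's metrics for the run page.
import wandb

best_keys = ['best/epoch', 'best/precision', 'best/recall', 'best/mAP_0.5', 'best/mAP_0.5:0.95']

# Hypothetical per-epoch metrics ordered as (precision, recall, mAP_0.5, mAP_0.5:0.95)
demo_vals = [
    (0.50, 0.40, 0.45, 0.25),
    (0.60, 0.55, 0.58, 0.35),
    (0.58, 0.54, 0.57, 0.34),
]

run = wandb.init(project='summary-demo', anonymous='allow')  # hypothetical project name
best_fitness = -1.0
for epoch, (precision, recall, map50, map50_95) in enumerate(demo_vals):
    run.log({'metrics/precision': precision, 'metrics/recall': recall,
             'metrics/mAP_0.5': map50, 'metrics/mAP_0.5:0.95': map50_95}, step=epoch)
    fi = 0.1 * map50 + 0.9 * map50_95  # fitness weighting, mirroring YOLOv5's fitness()
    if fi > best_fitness:  # new best epoch: overwrite the summary entries
        best_fitness = fi
        for name, value in zip(best_keys, [epoch, precision, recall, map50, map50_95]):
            run.summary[name] = value  # summary holds a single value per key
run.finish()
```

In the PR itself, the same assignment happens through `self.wandb.wandb_run.summary[name]` inside `on_fit_epoch_end`, guarded by `best_fitness == fi` so the summary is rewritten only on epochs that set a new best fitness.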