From 188a8d3953b62693bb206a1a19376ed65296e590 Mon Sep 17 00:00:00 2001 From: Ayush Chaurasia Date: Mon, 21 Jun 2021 17:30:25 +0530 Subject: [PATCH] W&B: Don't resume transfer learning runs (#3604) * Allow config change * Allow val change in wandb config * Don't resume transfer learning runs * Add entity in log dataset --- train.py | 1 + utils/wandb_logging/log_dataset.py | 2 ++ utils/wandb_logging/wandb_utils.py | 3 +-- 3 files changed, 4 insertions(+), 2 deletions(-) diff --git a/train.py b/train.py index 19bd97faca1f..67d835d60691 100644 --- a/train.py +++ b/train.py @@ -89,6 +89,7 @@ def train(hyp, # path/to/hyp.yaml or hyp dictionary # W&B opt.hyp = hyp # add hyperparameters run_id = torch.load(weights).get('wandb_id') if weights.endswith('.pt') and os.path.isfile(weights) else None + run_id = run_id if opt.resume else None # start fresh run if transfer learning wandb_logger = WandbLogger(opt, save_dir.stem, run_id, data_dict) loggers['wandb'] = wandb_logger.wandb if loggers['wandb']: diff --git a/utils/wandb_logging/log_dataset.py b/utils/wandb_logging/log_dataset.py index f45a23011f15..3a9a3d79fe01 100644 --- a/utils/wandb_logging/log_dataset.py +++ b/utils/wandb_logging/log_dataset.py @@ -18,6 +18,8 @@ def create_dataset_artifact(opt): parser.add_argument('--data', type=str, default='data/coco128.yaml', help='data.yaml path') parser.add_argument('--single-cls', action='store_true', help='train as single-class dataset') parser.add_argument('--project', type=str, default='YOLOv5', help='name of W&B Project') + parser.add_argument('--entity', default=None, help='W&B entity') + opt = parser.parse_args() opt.resume = False # Explicitly disallow resume check for dataset upload job diff --git a/utils/wandb_logging/wandb_utils.py b/utils/wandb_logging/wandb_utils.py index 43b4c3d04e8e..d82633c7e2f6 100644 --- a/utils/wandb_logging/wandb_utils.py +++ b/utils/wandb_logging/wandb_utils.py @@ -126,8 +126,7 @@ def __init__(self, opt, name, run_id, data_dict,
job_type='Training'): if not opt.resume: wandb_data_dict = self.check_and_upload_dataset(opt) if opt.upload_dataset else data_dict # Info useful for resuming from artifacts - self.wandb_run.config.opt = vars(opt) - self.wandb_run.config.data_dict = wandb_data_dict + self.wandb_run.config.update({'opt': vars(opt), 'data_dict': wandb_data_dict}, allow_val_change=True) self.data_dict = self.setup_training(opt, data_dict) if self.job_type == 'Dataset Creation': self.data_dict = self.check_and_upload_dataset(opt)