From e2c509e1e17339bf646e4b6d92ef600732df9cbe Mon Sep 17 00:00:00 2001
From: Mike Walmsley
Date: Sat, 2 Mar 2024 09:31:50 -0500
Subject: [PATCH] cosine logging

---
 zoobot/pytorch/training/finetune.py | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/zoobot/pytorch/training/finetune.py b/zoobot/pytorch/training/finetune.py
index 412752f8..27bc8ec7 100644
--- a/zoobot/pytorch/training/finetune.py
+++ b/zoobot/pytorch/training/finetune.py
@@ -245,12 +245,13 @@ def configure_optimizers(self):
         logging.info('param groups: {}'.format(len(params)))
         for param_group_n, param_group in enumerate(params):
             shapes_within_param_group = [p.shape for p in list(param_group['params'])]
-            logging.info('param group {}: {}'.format(param_group_n, shapes_within_param_group))
+            logging.debug('param group {}: {}'.format(param_group_n, shapes_within_param_group))
         # print('head params to optimize', [p.shape for p in params[0]['params']])  # head only
         # print(list(param_group['params']) for param_group in params)
         # exit()
         # Initialize AdamW optimizer
         opt = torch.optim.AdamW(params, weight_decay=self.weight_decay)  # lr included in params dict
+        logging.info('Optimizer ready, configuring scheduler')
 
         if self.cosine_schedule:
             # logging.info('Using cosine schedule, warmup for {} epochs, max for {} epochs'.format(self.warmup_epochs, self.max_cosine_epochs))
@@ -282,6 +283,7 @@ def configure_optimizers(self):
                 }
             }
         else:
+            logging.info('Learning rate scheduler not used')
             return opt
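
Note (not part of the patch): with this change the per-param-group shape dump is
emitted at DEBUG level, so the default INFO output keeps only the two new messages
('Optimizer ready, configuring scheduler' and 'Learning rate scheduler not used').
Below is a minimal usage sketch for surfacing the DEBUG records again when
diagnosing the optimizer setup; the FinetuneableZoobotClassifier name and its
constructor arguments are illustrative assumptions, not taken from this patch.

    import logging

    # DEBUG level restores the per-param-group shape listing demoted in this commit;
    # the default INFO level hides it but still shows the two new messages
    logging.basicConfig(level=logging.DEBUG)

    from zoobot.pytorch.training.finetune import FinetuneableZoobotClassifier  # assumed class name

    model = FinetuneableZoobotClassifier(
        name='hf_hub:mwalmsley/zoobot-encoder-convnext_nano',  # illustrative checkpoint
        num_classes=2,
        cosine_schedule=False,  # exercises the 'Learning rate scheduler not used' branch
    )
    # Lightning normally calls configure_optimizers() inside trainer.fit();
    # calling it directly here just triggers the logging touched by this patch
    opt = model.configure_optimizers()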