cosine logging
mwalmsley committed Mar 2, 2024
1 parent 09c70ba commit e2c509e
Showing 1 changed file with 3 additions and 1 deletion.
zoobot/pytorch/training/finetune.py (4 changes: 3 additions & 1 deletion)
@@ -245,12 +245,13 @@ def configure_optimizers(self):
         logging.info('param groups: {}'.format(len(params)))
         for param_group_n, param_group in enumerate(params):
             shapes_within_param_group = [p.shape for p in list(param_group['params'])]
-            logging.info('param group {}: {}'.format(param_group_n, shapes_within_param_group))
+            logging.debug('param group {}: {}'.format(param_group_n, shapes_within_param_group))
         # print('head params to optimize', [p.shape for p in params[0]['params']])  # head only
         # print(list(param_group['params']) for param_group in params)
         # exit()
         # Initialize AdamW optimizer
         opt = torch.optim.AdamW(params, weight_decay=self.weight_decay)  # lr included in params dict
+        logging.info('Optimizer ready, configuring scheduler')
 
         if self.cosine_schedule:
             # logging.info('Using cosine schedule, warmup for {} epochs, max for {} epochs'.format(self.warmup_epochs, self.max_cosine_epochs))
@@ -282,6 +283,7 @@ def configure_optimizers(self):
                 }
             }
         else:
+            logging.info('Learning rate scheduler not used')
             return opt
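For context, both hunks sit inside configure_optimizers, which returns either the bare AdamW optimizer or, when cosine_schedule is set, a PyTorch Lightning optimizer/scheduler dictionary (the closing braces in the second hunk are the end of that dictionary). Changing the per-param-group shape log from logging.info to logging.debug means those lines now only appear when the logger level is DEBUG. Below is a minimal, self-contained sketch of the return pattern this diff touches, assuming a standard LightningModule and torch.optim.lr_scheduler.CosineAnnealingLR; the class name, hyperparameters, and scheduler choice are illustrative and not taken from zoobot itself.

# Minimal sketch (not the zoobot implementation): a configure_optimizers that
# returns either a bare optimizer or a Lightning optimizer + scheduler dict,
# mirroring the two branches shown in the diff above.
import logging

import torch
import pytorch_lightning as pl


class FinetuneSketch(pl.LightningModule):
    def __init__(self, learning_rate=1e-4, weight_decay=0.05,
                 cosine_schedule=False, max_cosine_epochs=100):
        super().__init__()
        self.learning_rate = learning_rate
        self.weight_decay = weight_decay
        self.cosine_schedule = cosine_schedule
        self.max_cosine_epochs = max_cosine_epochs
        self.head = torch.nn.Linear(10, 2)  # placeholder layer so parameters() is non-empty

    def configure_optimizers(self):
        opt = torch.optim.AdamW(
            self.parameters(), lr=self.learning_rate, weight_decay=self.weight_decay)
        logging.info('Optimizer ready, configuring scheduler')

        if self.cosine_schedule:
            # anneal the learning rate towards zero over max_cosine_epochs
            lr_scheduler = torch.optim.lr_scheduler.CosineAnnealingLR(
                opt, T_max=self.max_cosine_epochs)
            return {
                'optimizer': opt,
                'lr_scheduler': {
                    'scheduler': lr_scheduler,
                    'interval': 'epoch'  # step the scheduler once per epoch
                }
            }
        else:
            logging.info('Learning rate scheduler not used')
            return opt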
