diff --git a/.gitignore b/.gitignore
index 6dcd794..70099fe 100644
--- a/.gitignore
+++ b/.gitignore
@@ -7,7 +7,9 @@
 code/simclr-pytorch-reefs/evaluation/__pycache__
 code/simclr-pytorch-reefs/evaluation/wandb/
 code/simclr-pytorch-reefs/evaluation/model_states_i2map_simclr
 scratch/baseline
+code/simclr-pytorch-reefs/evaluation/embeddings/raw_embeddings/
 code/simclr-pytorch-reefs/evaluation/embeddings/raw_embeddings
+code/simclr-pytorch-reefs/evaluation/embeddings/log_metrics/
 
 
diff --git a/code/simclr-pytorch-reefs/evaluation/batchsize_sweep.sh b/code/simclr-pytorch-reefs/evaluation/batchsize_sweep.sh
new file mode 100755
index 0000000..4eb7a6f
--- /dev/null
+++ b/code/simclr-pytorch-reefs/evaluation/batchsize_sweep.sh
@@ -0,0 +1,8 @@
+#!/bin/bash
+# Sweep training batch sizes by invoking the full-training driver once per size.
+set -euo pipefail
+
+for batch_size in 16 32 64 128 256; do
+    ./fully_train_resnet_all.sh "$batch_size"
+done
+# trying 8 again. then, completed 16 and 32 for batch size sweep. so continue from 64
diff --git a/code/simclr-pytorch-reefs/evaluation/fully_train_resnet_all_augs.sh b/code/simclr-pytorch-reefs/evaluation/fully_train_resnet_all_augs.sh
new file mode 100755
index 0000000..8644606
--- /dev/null
+++ b/code/simclr-pytorch-reefs/evaluation/fully_train_resnet_all_augs.sh
@@ -0,0 +1,37 @@
+#!/bin/bash
+# For each regional config, rewrite its hyperparameters in place with sed,
+# then fully train a ResNet (all layers, augmented) via train_eval.py.
+set -euo pipefail
+
+# conserve this order, with bermuda first
+configs=("config_australia.yml" "config_bermuda.yml" "config_kenya.yml" "config_florida.yml" "config_french_polynesia.yml" "config_indonesia.yml")
+
+# Set the desired hyperparameters. IF ADDING HERE, ALSO ADD A SED CALL BELOW.
+batch_size=64
+num_epochs=100
+learning_rate=0.001
+train_percent=0.8 # may want to change by dataset
+starting_weights="ImageNet" # "ReefCLR" or "ImageNet" - should always be ImageNet for fully training!
+finetune=False # True = train only final layer, False = train all layers
+transform=True
+device="cuda:1"
+wandb_project="Fully_trained_ResNet_augmented"
+
+# to do: name the wandb something sensible.
+for config in "${configs[@]}"; do
+    echo "Processing configuration file: $config"
+    cfg="multiple_config_runs/$config"
+
+    # Rewrite each setting in the config file in place.
+    sed -i "s/batch_size: [0-9]*/batch_size: $batch_size/" "$cfg"
+    sed -i "s/num_epochs: [0-9]*/num_epochs: $num_epochs/" "$cfg"
+    sed -i "s/learning_rate: .*/learning_rate: $learning_rate/" "$cfg"
+    sed -i "s/train_percent: .*/train_percent: $train_percent/" "$cfg"
+    sed -i "s/starting_weights: .*/starting_weights: $starting_weights/" "$cfg"
+    sed -i "s/finetune: .*/finetune: $finetune/" "$cfg"
+    sed -i "s/transform: .*/transform: $transform/" "$cfg"
+    sed -i "s/device: .*/device: $device/" "$cfg"
+    sed -i "s/wandb_project: .*/wandb_project: $wandb_project/" "$cfg"
+
+    python train_eval.py --config "$cfg"
+done
diff --git a/remove.txt b/remove.txt
new file mode 100644
index 0000000..ef328e7
--- /dev/null
+++ b/remove.txt
@@ -0,0 +1,5 @@
+code/notebooks/embedding_extractor/embeddings/
+home/ben/reef-audio-representation-learning/code/simclr-pytorch-reefs/evaluation/embeddings/raw_embeddings
+
+home/ben/reef-audio-representation-learning/scratch/baseline
+scratch/baseline