# Fine-tuning launch script: environment setup, then a sequence of accelerate
# launch runs (earlier runs are kept commented out below as an experiment log).

export OMINI_CONFIG=./config/commonsense.yaml
#echo $OMINI_CONFIG
export TOKENIZERS_PARALLELISM=true

# CUDA include directory (contains cuda.h)
CUDA_INCLUDE_PATH="/home/work/miniconda3/envs/allm/include"

# Add it to CPATH & CPLUS_INCLUDE_PATH (C/C++ compiler)
export CPATH=$CPATH:$CUDA_INCLUDE_PATH
export CPLUS_INCLUDE_PATH=$CPLUS_INCLUDE_PATH:$CUDA_INCLUDE_PATH
# echo "CPATH is set to: $CPATH"
# echo "CPLUS_INCLUDE_PATH is set to: $CPLUS_INCLUDE_PATH"

export WANDB_PROJECT="Llama2_7B_FT_Math40k_2"

# Keep CPU math libraries single-threaded
export OMP_NUM_THREADS=1
export MKL_NUM_THREADS=1
export OPENBLAS_NUM_THREADS=1
export NUMEXPR_NUM_THREADS=1

date +"%F %T"

# accelerate launch --main_process_port 41353 -m src.ft_mathR \
#     --config_path $OMINI_CONFIG --trainer_args.output_dir "./exp_init/run_ex01" --trainer_args.learning_rate=1e-3 \
#     --rotation_adapter_config.num_rotations 1 --rotation_adapter_config.r 16 --run_text "init|kaim_out_u=v"
# sleep 5
# echo "1st exp finishes"
# date +"%F %T"
# wandb sync wandb/latest-run

# accelerate launch --main_process_port 41353 -m src.ft_mathR \
#     --config_path $OMINI_CONFIG --trainer_args.output_dir "./exp_init/run_ex02" --trainer_args.learning_rate=1e-3 \
#     --rotation_adapter_config.num_rotations 1 --rotation_adapter_config.r 16 --run_text "init|kaim_out_u=v(ratio)"
# sleep 5
# echo "2nd exp finishes"
# date +"%F %T"
# wandb sync wandb/latest-run

# accelerate launch --main_process_port 41353 -m src.ft_mathR \
#     --config_path $OMINI_CONFIG --trainer_args.output_dir "./exps/run_ex03" --trainer_args.learning_rate=1e-3 \
#     --rotation_adapter_config.num_rotations 2 --rotation_adapter_config.r 8
# sleep 5
# echo "3rd exp finishes"
# date +"%F %T"
# wandb sync wandb/latest-run

# accelerate launch --main_process_port 41353 -m src.ft_mathR \
#     --config_path $OMINI_CONFIG --trainer_args.output_dir "./exps/run_ex04" --trainer_args.learning_rate=2e-3 \
#     --rotation_adapter_config.num_rotations 4 --rotation_adapter_config.r 4
# sleep 5
# echo "4th exp finishes"
# date +"%F %T"
# wandb sync wandb/latest-run

# accelerate launch --main_process_port 41353 -m src.ft_mathR \
#     --config_path $OMINI_CONFIG --trainer_args.output_dir "./exps/run_ex05" --trainer_args.learning_rate=2e-3 \
#     --rotation_adapter_config.num_rotations 2 --rotation_adapter_config.r 8
# sleep 5
# echo "5th exp finishes"
# date +"%F %T"
# wandb sync wandb/latest-run

# accelerate launch --main_process_port 41353 -m src.ft_mathR \
#     --config_path $OMINI_CONFIG --trainer_args.output_dir "./exps/run_ex06" --trainer_args.learning_rate=1e-3 \
#     --rotation_adapter_config.num_rotations 4 --rotation_adapter_config.r 4
# sleep 5
# echo "6th exp finishes"
# date +"%F %T"
# wandb sync wandb/latest-run

# accelerate launch --main_process_port 41353 -m src.ft_mathR \
#     --config_path $OMINI_CONFIG --trainer_args.output_dir "./exps/run_exps7" --trainer_args.learning_rate=1e-3 \
#     --rotation_adapter_config.num_rotations 1 --rotation_adapter_config.r 16
# sleep 5
# echo "7th exp finishes"
# date +"%F %T"
# wandb sync wandb/latest-run

# accelerate launch --main_process_port 41353 -m src.ft_mathR \
#     --config_path $OMINI_CONFIG --trainer_args.output_dir "./exps/run_ex08" --trainer_args.learning_rate=2e-3 \
#     --rotation_adapter_config.num_rotations 1 --rotation_adapter_config.r 16
# sleep 5
# echo "8th exp finishes"
# date +"%F %T"
# wandb sync wandb/latest-run

# accelerate launch --main_process_port 41353 -m src.ft_mathR \
#     --config_path $OMINI_CONFIG --trainer_args.output_dir "./exps/run_ex09" --trainer_args.learning_rate=2e-3 \
#     --rotation_adapter_config.num_rotations 16 --rotation_adapter_config.r 1
# sleep 5
# echo "9th exp finishes"
# date +"%F %T"
# wandb sync wandb/latest-run

# accelerate launch --main_process_port 41353 -m src.ft_mathR \
#     --config_path $OMINI_CONFIG --trainer_args.output_dir "./exps/run_ex10" --trainer_args.learning_rate=2e-3 \
#     --rotation_adapter_config.num_rotations 8 --rotation_adapter_config.r 2
# sleep 5
# echo "10 exp finishes"
# date +"%F %T"
# wandb sync wandb/latest-run

# accelerate launch --main_process_port 41353 -m src.ft_mathR \
#     --config_path $OMINI_CONFIG --trainer_args.output_dir "./exps/run_ex11" --trainer_args.learning_rate=1e-2 \
#     --rotation_adapter_config.num_rotations 1 --rotation_adapter_config.r 16
# sleep 5
# echo "11 exp finishes"
# date +"%F %T"
# wandb sync wandb/latest-run

# accelerate launch --main_process_port 41353 -m src.ft_mathR \
#     --config_path $OMINI_CONFIG --trainer_args.output_dir "./exps/run_ex12" --trainer_args.learning_rate=1e-2 \
#     --rotation_adapter_config.num_rotations 1 --rotation_adapter_config.r 16 --run_text 'u=v,def'
# sleep 5
# echo "12 exp finishes"
# date +"%F %T"
# wandb sync wandb/latest-run

# accelerate launch --main_process_port 41353 -m src.ft_mathR \
#     --config_path $OMINI_CONFIG --trainer_args.output_dir "./exps/run_ex13" --trainer_args.learning_rate=1e-3 \
#     --rotation_adapter_config.num_rotations 1 --rotation_adapter_config.r 16 --run_text 'u=vkaim'
# sleep 5
# echo "13 exp finishes"
# date +"%F %T"
# wandb sync wandb/latest-run

# accelerate launch --main_process_port 41353 -m src.ft_mathR \
#     --config_path $OMINI_CONFIG --trainer_args.output_dir "./exps/run_ex14" --trainer_args.learning_rate=2e-3 \
#     --rotation_adapter_config.num_rotations 1 --rotation_adapter_config.r 16 --run_text 'a,b,def'
# sleep 5
# echo "14 exp finishes"
# date +"%F %T"
# wandb sync wandb/latest-run

# accelerate launch --main_process_port 41353 -m src.ft_mathR \
#     --config_path $OMINI_CONFIG --trainer_args.output_dir "./exps/run_ex15" --trainer_args.learning_rate=1e-3 \
#     --rotation_adapter_config.num_rotations 4 --rotation_adapter_config.r 4
# sleep 5
# echo "15 exp finishes"
# date +"%F %T"

# accelerate launch --main_process_port 41353 -m src.ft_mathR \
#     --config_path $OMINI_CONFIG --trainer_args.output_dir "./exps/run_ex17" --trainer_args.learning_rate=1e-3 \
#     --rotation_adapter_config.num_rotations 1 --rotation_adapter_config.r 16 \
#     --trainer_args.num_train_epochs 2.0 --trainer_args.eval_steps 500 --data.dataset_split train[:41023] --data.split_ratio 0.02493 \
#     --run_text "dropout|fix_token"
# sleep 5
# echo "15 exp finishes"
# date +"%F %T"
# wandb sync wandb/latest-run

# accelerate launch --main_process_port 41353 -m src.ft_mathR \
#     --config_path $OMINI_CONFIG --trainer_args.output_dir "./exps/run_ex18" --trainer_args.learning_rate=1e-3 \
#     --rotation_adapter_config.num_rotations 1 --rotation_adapter_config.r 16 \
#     --trainer_args.num_train_epochs 3.0 --trainer_args.eval_steps 500 --data.dataset_split train[:41023] --data.split_ratio 0.02493 \
#     --run_text "dropout|fix_token"
# sleep 5
# echo "158exp finishes"
# date +"%F %T"
# wandb sync wandb/latest-run

# accelerate launch --main_process_port 41353 -m src.ft_mathR \
#     --config_path $OMINI_CONFIG --trainer_args.output_dir "./exps/run_ex19" --trainer_args.learning_rate=2e-3 \
#     --rotation_adapter_config.num_rotations 1 --rotation_adapter_config.r 16 \
#     --trainer_args.num_train_epochs 3.0 --trainer_args.eval_steps 500 --data.dataset_split train[:41023] --data.split_ratio 0.02493 \
#     --run_text "dropout|fix_token"
# sleep 5
# echo "19 exp finishes"
exp finishes" # date +"%F %T" # wandb sync wandb/latest-run # accelerate launch --main_process_port 41353 -m src.ft_mathR \ # --config_path $OMINI_CONFIG --trainer_args.output_dir "./exps/run_ex20" --trainer_args.learning_rate=8e-4 \ # --rotation_adapter_config.num_rotations 1 --rotation_adapter_config.r 16 \ # --trainer_args.num_train_epochs 3.0 --trainer_args.eval_steps 500 --data.dataset_split train[:41023] --data.split_ratio 0.02493 \ # --run_text "dropout|fix_token" # sleep 5 # echo "20 exp finishes" # date +"%F %T" # wandb sync wandb/latest-run # accelerate launch --main_process_port 41353 -m src.ft_mathR \ # --config_path $OMINI_CONFIG --trainer_args.output_dir "./exps/run_ex21" --trainer_args.learning_rate=1e-3 \ # --rotation_adapter_config.num_rotations 1 --rotation_adapter_config.r 16 \ # --trainer_args.num_train_epochs 2.0 --trainer_args.eval_steps 500 --data.dataset_split train[:41023] --data.split_ratio 0.02493 \ # --run_text "dropout|2ep|1e3" # sleep 5 # echo "21 exp finishes" # date +"%F %T" # wandb sync wandb/latest-run # back to official 40k # accelerate launch --main_process_port 41353 -m src.ft_mathQ \ # --config_path $OMINI_CONFIG --trainer_args.output_dir "./exps/run_ex22" --trainer_args.learning_rate=1e-3 \ # --rotation_adapter_config.num_rotations 1 --rotation_adapter_config.r 16 \ # --trainer_args.num_train_epochs 2.0 --data.dataset_split train \ # --run_text "drop0.1|2ep|1e3|40k" # sleep 5 # echo "21 exp finishes" # date +"%F %T" # wandb sync wandb/latest-run # accelerate launch --main_process_port 41353 -m src.ft_mathQ \ # --config_path $OMINI_CONFIG --trainer_args.output_dir "./exps/run_ex23" --trainer_args.learning_rate=1e-3 \ # --rotation_adapter_config.num_rotations 1 --rotation_adapter_config.r 16 \ # --trainer_args.num_train_epochs 3.0 --data.dataset_split train \ # --run_text "drop0.1|2ep|1e3|40k" # sleep 5 # echo "21 exp finishes" # date +"%F %T" # wandb sync wandb/latest-run # accelerate launch --main_process_port 41353 -m src.ft_mathQ \ # --config_path $OMINI_CONFIG --trainer_args.output_dir "./exps/run_ex24" --trainer_args.learning_rate=1e-2 \ # --rotation_adapter_config.num_rotations 1 --rotation_adapter_config.r 16 \ # --trainer_args.num_train_epochs 3.0 --data.dataset_split train \ # --run_text "drop0.1|2ep|1e2|40k" # sleep 5 # echo "21 exp finishes" # date +"%F %T" # wandb sync wandb/latest-run # accelerate launch --main_process_port 41353 -m src.ft_mathQ \ # --config_path $OMINI_CONFIG --trainer_args.output_dir "./exps/run_ex25" --trainer_args.learning_rate=2e-3 \ # --rotation_adapter_config.num_rotations 1 --rotation_adapter_config.r 16 \ # --trainer_args.num_train_epochs 3.0 --data.dataset_split train \ # --run_text "drop0.1|2ep|2e3|40k" # sleep 5 # echo "21 exp finishes" # date +"%F %T" # wandb sync wandb/latest-run # accelerate launch --main_process_port 41353 -m src.ft_mathQ \ # --config_path $OMINI_CONFIG --trainer_args.output_dir "./exps/run_ex26" --trainer_args.learning_rate=5e-3 \ # --rotation_adapter_config.num_rotations 1 --rotation_adapter_config.r 16 \ # --trainer_args.num_train_epochs 3.0 --data.dataset_split train \ # --run_text "drop0.1|2ep|5e3|40k" # sleep 5 # echo "21 exp finishes" # date +"%F %T" # wandb sync wandb/latest-run # accelerate launch --main_process_port 41353 -m src.ft_mathQ \ # --config_path $OMINI_CONFIG --trainer_args.output_dir "./exps/run_ex27" --trainer_args.learning_rate=8e-3 \ # --rotation_adapter_config.num_rotations 1 --rotation_adapter_config.r 16 \ # --trainer_args.num_train_epochs 3.0 --data.dataset_split 
#     --run_text "drop0.1|2ep|8e3|40k"
# sleep 5
# echo "21 exp finishes"
# date +"%F %T"
# wandb sync wandb/latest-run

# accelerate launch --main_process_port 41353 -m src.ft_mathQ \
#     --config_path $OMINI_CONFIG --trainer_args.output_dir "./exps/run_ex28" --trainer_args.learning_rate=2e-2 \
#     --rotation_adapter_config.num_rotations 1 --rotation_adapter_config.r 16 \
#     --trainer_args.num_train_epochs 3.0 --data.dataset_split train \
#     --run_text "drop0.1|2ep|2e2|40k"
# sleep 5
# echo "21 exp finishes"
# date +"%F %T"
# wandb sync wandb/latest-run

# accelerate launch --main_process_port 41353 -m src.ft_mathQ \
#     --config_path $OMINI_CONFIG --trainer_args.output_dir "./exps/run_ex29" --trainer_args.learning_rate=5e-3 \
#     --rotation_adapter_config.num_rotations 1 --rotation_adapter_config.r 16 \
#     --trainer_args.num_train_epochs 2.0 --data.dataset_split train \
#     --run_text "drop0.1|2ep|initu=v=0.01|40k" --trainer_args.per_device_train_batch_size 48
# sleep 5
# echo "29 exp finishes"
# date +"%F %T"
# wandb sync wandb/latest-run

# accelerate launch --main_process_port 41353 -m src.ft_mathQ \
#     --config_path $OMINI_CONFIG --trainer_args.output_dir "./exps/run_ex30" --trainer_args.learning_rate=1e-3 \
#     --rotation_adapter_config.num_rotations 1 --rotation_adapter_config.r 16 \
#     --trainer_args.num_train_epochs 2.0 --data.dataset_split train \
#     --run_text "drop0.1|2ep|initu=v=0.01|40k" --trainer_args.per_device_train_batch_size 48
# sleep 5
# echo "29 exp finishes"
# date +"%F %T"
# wandb sync wandb/latest-run

# accelerate launch --main_process_port 41353 -m src.ft_mathQ \
#     --config_path $OMINI_CONFIG --trainer_args.output_dir "./exps/run_ex31" --trainer_args.learning_rate=5e-3 \
#     --rotation_adapter_config.num_rotations 1 --rotation_adapter_config.r 16 \
#     --trainer_args.num_train_epochs 3.0 --data.dataset_split train \
#     --run_text "drop0.1|2ep|initu=v=0.01|40k" --trainer_args.per_device_train_batch_size 48
# sleep 5
# echo "29 exp finishes"
# date +"%F %T"
# wandb sync wandb/latest-run

# accelerate launch --main_process_port 41353 -m src.ft_mathQ \
#     --config_path $OMINI_CONFIG --trainer_args.output_dir "./exps/run_ex32" --trainer_args.learning_rate=1e-3 \
#     --rotation_adapter_config.num_rotations 1 --rotation_adapter_config.r 16 \
#     --trainer_args.num_train_epochs 3.0 --data.dataset_split train \
#     --run_text "drop0.1|2ep|initu=v=0.01|40k" --trainer_args.per_device_train_batch_size 48
# sleep 5
# echo "29 exp finishes"
# date +"%F %T"
# wandb sync wandb/latest-run

# accelerate launch --main_process_port 41353 -m src.ft_mathQ \
#     --config_path $OMINI_CONFIG --trainer_args.output_dir "./exps/run_ex33" --trainer_args.learning_rate=1e-2 \
#     --rotation_adapter_config.num_rotations 1 --rotation_adapter_config.r 16 \
#     --trainer_args.num_train_epochs 3.0 --data.dataset_split train \
#     --run_text "drop0.1|2ep|initu=v=0.01|40k" --trainer_args.per_device_train_batch_size 48
# sleep 5
# echo "29 exp finishes"
# date +"%F %T"
# wandb sync wandb/latest-run

# Active run: experiment 34
accelerate launch --main_process_port 41353 -m src.ft_mathQ \
    --config_path $OMINI_CONFIG --trainer_args.output_dir "./exps/run_ex34" --trainer_args.learning_rate=2e-2 \
    --rotation_adapter_config.num_rotations 1 --rotation_adapter_config.r 16 \
    --trainer_args.num_train_epochs 3.0 --data.dataset_split train \
    --run_text "drop0.1|2ep|initu=v=0.01|40k" --trainer_args.per_device_train_batch_size 48
sleep 5
echo "34 exp finishes"
date +"%F %T"
wandb sync wandb/latest-run

bash scripts/merge.sh
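
# ---------------------------------------------------------------------------
# Optional sketch (kept commented out, not part of the runs above): the
# archived blocks differ only in learning_rate / num_rotations / r / epochs,
# so future sweeps could be expressed as a loop instead of copy-pasted blocks.
# Flag names are copied from the commands above; the output-dir pattern and
# the run_text label here are illustrative assumptions.
# ---------------------------------------------------------------------------
# for lr in 1e-3 2e-3 5e-3; do
#     accelerate launch --main_process_port 41353 -m src.ft_mathQ \
#         --config_path $OMINI_CONFIG --trainer_args.output_dir "./exps/sweep_lr_${lr}" \
#         --trainer_args.learning_rate=${lr} \
#         --rotation_adapter_config.num_rotations 1 --rotation_adapter_config.r 16 \
#         --trainer_args.num_train_epochs 3.0 --data.dataset_split train \
#         --run_text "sweep|lr=${lr}" --trainer_args.per_device_train_batch_size 48
#     wandb sync wandb/latest-run
# done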