#!/bin/bash
# GLUE test run for DeBERTaV3 with rotation adapters.

# Experiment configuration consumed by src.test.
export OMINI_CONFIG=./config/glue.yaml
export TOKENIZERS_PARALLELISM=true

# Expose the CUDA headers to the compiler (CUDA_INCLUDE_PATH must already be set).
export CPATH=$CPATH:$CUDA_INCLUDE_PATH
export CPLUS_INCLUDE_PATH=$CPLUS_INCLUDE_PATH:$CUDA_INCLUDE_PATH

# Weights & Biases project name (reporting is disabled below via --trainer_args.report_to none).
export WANDB_PROJECT="DeBERTaV3-GLUE-Test"

# Limit the CPU math libraries to a single thread each.
export OMP_NUM_THREADS=1
export MKL_NUM_THREADS=1
export OPENBLAS_NUM_THREADS=1
export NUMEXPR_NUM_THREADS=1

# Record the start time.
date +"%F %T"

# Hyperparameter grid (single values here; add entries to sweep).
MODEL_LRS=("1e-4")
CLS_LRS=("2e-3")
DROPOUT_RATES=("0.1")
# Available adapter variants; the run below always passes --run_text 'oft'
# (see the note after the loop).
TEXT=("oft" "boft" "hra" "loco")

# Logging/eval/save interval and the epoch budget.
STEPS=2000
EPOCHS=11

for m_lr in "${MODEL_LRS[@]}"; do
  for c_lr in "${CLS_LRS[@]}"; do
    for drop_out in "${DROPOUT_RATES[@]}"; do

      echo ">>> Params: model_lr=$m_lr, cls_lr=$c_lr, dropout=$drop_out, step=$STEPS, epoch=$EPOCHS"

      # Launch a single GLUE (QNLI) run with the current hyperparameters.
      python -m src.test \
        --config_path $OMINI_CONFIG --trainer_args.output_dir "./glue_testYY" --run_text 'oft' \
        --rotation_adapter_config.num_rotations 1 --rotation_adapter_config.r 6 \
        --trainer_args.gradient_accumulation_steps 1 \
        --glue.is_debug False --rotation_adapter_config.drop_out "$drop_out" \
        --glue.task_name qnli --trainer_args.metric_for_best_model accuracy \
        --trainer_args.num_train_epochs $EPOCHS --trainer_args.max_steps 405 --trainer_args.warmup_steps 200 \
        --glue.model_lr "$m_lr" --glue.cls_lr "$c_lr" \
        --trainer_args.logging_steps $STEPS --trainer_args.eval_steps $STEPS --trainer_args.save_steps $STEPS \
        --trainer_args.report_to none \
        --glue.max_seq_length 512 \
        --trainer_args.per_device_train_batch_size 64 --trainer_args.per_device_eval_batch_size 128 \
        --trainer_args.eval_strategy '"no"' \
        --trainer_args.load_best_model_at_end False \
        --trainer_args.save_strategy '"no"'

    done
  done
done
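
# Note: the TEXT array defined above is not used by the loop; every run passes
# --run_text 'oft'. If the intent is to sweep all listed adapter variants, one
# possible extension (a sketch only, assuming --run_text accepts each value) is
# an extra loop around the python call, e.g.:
#
#   for text in "${TEXT[@]}"; do
#     python -m src.test --config_path $OMINI_CONFIG --run_text "$text" ...
#   done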

date +"%F %T"