From 7eac4fde6ed97e6da82e2229b180c03bbcbf8429 Mon Sep 17 00:00:00 2001
From: Junyang Lin
Date: Sun, 8 Oct 2023 14:50:08 +0800
Subject: [PATCH] Update finetune_lora_single_gpu.sh

---
 finetune/finetune_lora_single_gpu.sh | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/finetune/finetune_lora_single_gpu.sh b/finetune/finetune_lora_single_gpu.sh
index 5b3388d..4096df6 100644
--- a/finetune/finetune_lora_single_gpu.sh
+++ b/finetune/finetune_lora_single_gpu.sh
@@ -23,7 +23,7 @@ python finetune.py \
     --save_strategy "steps" \
     --save_steps 1000 \
     --save_total_limit 10 \
-    --learning_rate 1e-5 \
+    --learning_rate 3e-4 \
     --weight_decay 0.1 \
     --adam_beta2 0.95 \
     --warmup_ratio 0.01 \
@@ -33,4 +33,4 @@ python finetune.py \
     --model_max_length 512 \
     --lazy_preprocess True \
     --gradient_checkpointing \
-    --use_lora
\ No newline at end of file
+    --use_lora