Fix a low_cpu_mem_usage bug in finetune.py.

Branch: main
Author: 苏阳, 1 year ago
Parent: ab109ced9f
Commit: 73b34d4a9d

@@ -291,9 +291,9 @@ def train():
     ):
         raise RuntimeError("ZeRO3 is incompatible with LoRA when finetuning on base model.")
-    model_load_kwargs = {}
-    if deepspeed.is_deepspeed_zero3_enabled():
-        model_load_kwargs['low_cpu_mem_usage'] = False
+    model_load_kwargs = {
+        'low_cpu_mem_usage': not deepspeed.is_deepspeed_zero3_enabled(),
+    }
     # Set RoPE scaling factor
     config = transformers.AutoConfig.from_pretrained(
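
For context: transformers rejects low_cpu_mem_usage=True while DeepSpeed ZeRO-3 is active, because ZeRO-3 wants to shard parameters as the model is constructed, so the flag has to be derived from is_deepspeed_zero3_enabled(). Below is a minimal sketch of how the rewritten dict would typically be consumed; the hunk above only shows the dict being built, so the load_model wrapper, its model_name_or_path argument, and the pass-through into AutoModelForCausalLM.from_pretrained are illustrative assumptions, not part of the commit.

import transformers
from transformers.integrations import deepspeed  # older transformers releases exposed this as transformers.deepspeed

def load_model(model_name_or_path):
    # Enable low_cpu_mem_usage only when ZeRO-3 is NOT enabled, mirroring the new hunk.
    model_load_kwargs = {
        'low_cpu_mem_usage': not deepspeed.is_deepspeed_zero3_enabled(),
    }
    config = transformers.AutoConfig.from_pretrained(model_name_or_path)
    # Assumed usage: finetune.py forwards these kwargs into from_pretrained().
    model = transformers.AutoModelForCausalLM.from_pretrained(
        model_name_or_path,
        config=config,
        **model_load_kwargs,
    )
    return model

Used this way, a non-ZeRO-3 run loads weights without first materialising a randomly initialised full model in host RAM, while a ZeRO-3 run falls back to the default loading path that DeepSpeed can shard.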
