From 861086b66df9b04c4e5d4ae3aea8b530b6ab190c Mon Sep 17 00:00:00 2001
From: Yang An
Date: Tue, 12 Sep 2023 11:29:32 +0800
Subject: [PATCH] Update README.md

---
 README.md | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/README.md b/README.md
index 2210223..757ab30 100644
--- a/README.md
+++ b/README.md
@@ -183,9 +183,9 @@ ModelScope is an opensource platform for Model-as-a-Service (MaaS), which provid
 from modelscope import AutoModelForCausalLM, AutoTokenizer
 from modelscope import GenerationConfig
 
-tokenizer = AutoTokenizer.from_pretrained("qwen/Qwen-7B-Chat", revision = 'v1.0.5',trust_remote_code=True)
-model = AutoModelForCausalLM.from_pretrained("qwen/Qwen-7B-Chat", revision = 'v1.0.5',device_map="auto", trust_remote_code=True,fp16 = True).eval()
-model.generation_config = GenerationConfig.from_pretrained("Qwen/Qwen-7B-Chat",revision = 'v1.0.5', trust_remote_code=True) # You can specify different generation lengths, top_p, and other related hyperparameters
+tokenizer = AutoTokenizer.from_pretrained("qwen/Qwen-7B-Chat", revision='v1.0.5', trust_remote_code=True)
+model = AutoModelForCausalLM.from_pretrained("qwen/Qwen-7B-Chat", revision='v1.0.5', device_map="auto", trust_remote_code=True, fp16=True).eval()
+model.generation_config = GenerationConfig.from_pretrained("Qwen/Qwen-7B-Chat", revision='v1.0.5', trust_remote_code=True) # You can specify different generation lengths, top_p, and other related hyperparameters
 
 response, history = model.chat(tokenizer, "你好", history=None)
 print(response)
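
For reference, below is how the ModelScope snippet reads once this patch is applied, extended with one additional chat turn for illustration. This is a minimal sketch, assuming the modelscope package is installed and the qwen/Qwen-7B-Chat revision 'v1.0.5' is still available on ModelScope; the second model.chat() call and its prompt are not part of the patch, they only show how the returned history is reused.

from modelscope import AutoModelForCausalLM, AutoTokenizer
from modelscope import GenerationConfig

# Load the tokenizer and chat model from ModelScope at a pinned revision.
tokenizer = AutoTokenizer.from_pretrained("qwen/Qwen-7B-Chat", revision='v1.0.5', trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained("qwen/Qwen-7B-Chat", revision='v1.0.5', device_map="auto", trust_remote_code=True, fp16=True).eval()
# Generation hyperparameters (generation length, top_p, etc.) can be adjusted on this config.
model.generation_config = GenerationConfig.from_pretrained("Qwen/Qwen-7B-Chat", revision='v1.0.5', trust_remote_code=True)

# First turn: history=None starts a fresh conversation.
response, history = model.chat(tokenizer, "你好", history=None)
print(response)

# Illustrative follow-up turn (not in the patch): pass the returned history back in
# so the model conditions on the previous exchange.
response, history = model.chat(tokenizer, "Tell me something about large language models.", history=history)
print(response)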