use english prompt by default (#12115)

Shaojun Liu 2024-09-24 17:40:50 +08:00 committed by GitHub
parent 5d63aef60b
commit 657889e3e4


@@ -435,12 +435,14 @@ LLM_URLS = [f"http://localhost:{PORT}/v1/completions" for PORT in [8000]]
MODEL = "/llm/models/" + model_name
MAX_TOKENS = output_length # 修改 MAX_TOKENS 为 output_length
if "Qwen" not in MODEL and "chatglm" not in MODEL:
# print("using Llama PROMPT")
PROMPT = ENGLISH_PROMPT
else:
# print("using Qwen/chatglm PROMPT")
PROMPT = CHINESE_PROMPT
# if "Qwen" not in MODEL and "chatglm" not in MODEL:
# print("using Llama PROMPT")
# PROMPT = ENGLISH_PROMPT
# else:
# print("using Qwen/chatglm PROMPT")
# PROMPT = CHINESE_PROMPT
PROMPT = ENGLISH_PROMPT
# 加载模型的 tokenizer
from transformers import AutoTokenizer
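
Below is a minimal sketch (not part of this commit) of how the variables in the hunk above are presumably used: tokenize the now-default English prompt and send an OpenAI-style completion request to the local endpoint in LLM_URLS. The model path, the 32-token prompt trim, and the request payload fields are illustrative assumptions, not taken from the benchmark script itself.

# Sketch only: assumed usage of PROMPT, MODEL, MAX_TOKENS, and LLM_URLS from the diff above.
import requests
from transformers import AutoTokenizer

ENGLISH_PROMPT = "Once upon a time, there existed a little girl ..."  # placeholder prompt text
MODEL = "/llm/models/Llama-2-7b-chat-hf"  # hypothetical model path under /llm/models/
MAX_TOKENS = 128                          # stands in for output_length
LLM_URLS = [f"http://localhost:{PORT}/v1/completions" for PORT in [8000]]

PROMPT = ENGLISH_PROMPT  # always English after this commit, regardless of model name

# Load the model's tokenizer and trim the prompt to a fixed input length (assumed step).
tokenizer = AutoTokenizer.from_pretrained(MODEL, trust_remote_code=True)
prompt = tokenizer.decode(tokenizer.encode(PROMPT)[:32])

# Send an OpenAI-style /v1/completions request to the locally served model.
payload = {"model": MODEL, "prompt": prompt, "max_tokens": MAX_TOKENS, "temperature": 0.0}
resp = requests.post(LLM_URLS[0], json=payload, timeout=300)
print(resp.json()["choices"][0]["text"])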