fix prompt format for llama-2 in langchain (#10637)

Zhicun authored on 2024-04-03 14:17:34 +08:00, committed by GitHub
commit f6fef09933, parent 330d4b4f4b
2 changed files with 10 additions and 2 deletions


@@ -30,7 +30,11 @@ def main(args):
     question = args.question
     model_path = args.model_path
-    template ="""{question}"""
+    # Below is the prompt format for LLaMA-2 according to
+    # https://huggingface.co/meta-llama/Llama-2-7b-chat-hf
+    # If you're using a different language model,
+    # please adjust the template according to its own model card.
+    template = """<s>[INST] <<SYS>>\n \n<</SYS>>\n\n{question} [/INST]"""
     prompt = PromptTemplate(template=template, input_variables=["question"])

The same change is applied in a second example file:

@@ -30,7 +30,11 @@ def main(args):
     question = args.question
     model_path = args.model_path
-    template ="""{question}"""
+    # Below is the prompt format for LLaMA-2 according to
+    # https://huggingface.co/meta-llama/Llama-2-7b-chat-hf
+    # If you're using a different language model,
+    # please adjust the template according to its own model card.
+    template = """<s>[INST] <<SYS>>\n \n<</SYS>>\n\n{question} [/INST]"""
     prompt = PromptTemplate(template=template, input_variables=["question"])
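
For reference, here is a minimal sketch of what the fixed template renders to, assuming the PromptTemplate import path from langchain.prompts that these examples use elsewhere; the question string is a hypothetical placeholder:

from langchain.prompts import PromptTemplate

# Llama-2 chat format: [INST] ... [/INST] wraps the user turn and
# <<SYS>> ... <</SYS>> wraps the (here empty) system prompt. The \n
# sequences become real newlines because the template is an ordinary
# (non-raw) string.
template = """<s>[INST] <<SYS>>\n \n<</SYS>>\n\n{question} [/INST]"""
prompt = PromptTemplate(template=template, input_variables=["question"])

# format() fills the {question} slot and leaves the Llama-2 markers intact:
print(prompt.format(question="What is AI?"))
# <s>[INST] <<SYS>>
#
# <</SYS>>
#
# What is AI? [/INST]

The old template passed the bare question through, so chat-tuned Llama-2 checkpoints never saw the [INST]/<<SYS>> markers they were trained on; wrapping the question this way is what the linked model card prescribes.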