fix prompt format for llama-2 in langchain (#10637)
parent 330d4b4f4b
commit f6fef09933

2 changed files with 10 additions and 2 deletions
The same hunk is applied in each of the two changed files. Previously the template passed the raw {question} straight through to the model; the new template wraps it in LLaMa-2's [INST]/<<SYS>> chat format.

@@ -30,7 +30,11 @@ def main(args):
     question = args.question
     model_path = args.model_path

-    template ="""{question}"""
+    # Below is the prompt format for LLaMa-2 according to
+    # https://huggingface.co/meta-llama/Llama-2-7b-chat-hf
+    # If you're using a different language model,
+    # please adjust the template according to its own model card.
+    template = """<s>[INST] <<SYS>>\n \n<</SYS>>\n\n{question} [/INST]"""

     prompt = PromptTemplate(template=template, input_variables=["question"])
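For reference, a minimal sketch (not part of this commit) of what the new template produces once formatted, using LangChain's PromptTemplate; the sample question is illustrative:

from langchain.prompts import PromptTemplate

# LLaMa-2 chat format from the commit: an empty <<SYS>> system block,
# with the user question wrapped in [INST] ... [/INST].
template = """<s>[INST] <<SYS>>\n \n<</SYS>>\n\n{question} [/INST]"""
prompt = PromptTemplate(template=template, input_variables=["question"])

# The \n escapes are real newlines, so the string sent to the model is:
print(prompt.format(question="What is AI?"))
# <s>[INST] <<SYS>>
#  (single space)
# <</SYS>>
#
# What is AI? [/INST]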