From bba73ec9d2652c8440554c3787f63f9815441fa4 Mon Sep 17 00:00:00 2001
From: Yishuo Wang
Date: Wed, 30 Aug 2023 15:05:19 +0800
Subject: [PATCH] [LLM] change chatglm native int4 checkpoint name (#8851)

---
 python/llm/src/bigdl/llm/utils/convert_chatglm.py | 6 ++++--
 python/llm/src/bigdl/llm/utils/convert_util.py    | 2 +-
 2 files changed, 5 insertions(+), 3 deletions(-)

diff --git a/python/llm/src/bigdl/llm/utils/convert_chatglm.py b/python/llm/src/bigdl/llm/utils/convert_chatglm.py
index 17698024..466287a3 100644
--- a/python/llm/src/bigdl/llm/utils/convert_chatglm.py
+++ b/python/llm/src/bigdl/llm/utils/convert_chatglm.py
@@ -413,8 +413,8 @@ def main():
         help="Model name or path used in AutoModel.from_pretrained",
     )
     parser.add_argument(
-        "-o", "--save_path", default="chatglm-ggml.bin",
-        type=Path, help="Path to save the generated GGML model"
+        "-o", "--save_path", default="",
+        type=str, help="Path to save the generated GGML model"
     )
     parser.add_argument(
         "-t",
@@ -426,6 +426,8 @@
     )
 
     args = parser.parse_args()
+    if args.save_path == "":
+        args.save_path = f"bigdl_llm_chatglm_{args.type.lower()}.bin"
     ggml_type = GGMLType[args.type.upper()]
     tokenizer = AutoTokenizer.from_pretrained(args.model_name_or_path, trust_remote_code=True)
     model = AutoModel.from_pretrained(args.model_name_or_path, trust_remote_code=True)
diff --git a/python/llm/src/bigdl/llm/utils/convert_util.py b/python/llm/src/bigdl/llm/utils/convert_util.py
index bd223587..8a9e5059 100644
--- a/python/llm/src/bigdl/llm/utils/convert_util.py
+++ b/python/llm/src/bigdl/llm/utils/convert_util.py
@@ -1778,7 +1778,7 @@ def _convert_starcoder_hf_to_ggml(model_path, outfile_dir, outtype):
 
 def _convert_chatglm_hf_to_ggml(model_path, outfile_dir, outtype):
     filestem = Path(model_path).stem
-    outfile = os.path.join(outfile_dir, f"ggml-{filestem}-{outtype}.bin")
+    outfile = os.path.join(outfile_dir, f"bigdl_llm_chatglm_{outtype}.bin")
     invalidInputError(outtype in ["q4_0", "q4_1"],
                       "For now we only support quantization type 'q4_0' and 'q4_1' "
                       "in chatglm family.")
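
Note (not part of the patch): a minimal sketch of the new default-naming behavior. When -o/--save_path is left empty, the converted native int4 checkpoint is now named after the quantization type instead of the model. The "-o" handling and the q4_0/q4_1 types come from the hunks above; the "--type" default of "q4_0" used below is an assumption for illustration only.

    # Minimal sketch, assuming the argparse setup shown in the hunks above.
    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument("-o", "--save_path", default="", type=str,
                        help="Path to save the generated GGML model")
    parser.add_argument("-t", "--type", default="q4_0", type=str)  # default value assumed

    args = parser.parse_args([])  # simulate running the converter without -o / -t

    # Same fallback logic as the added lines in convert_chatglm.py:
    if args.save_path == "":
        args.save_path = f"bigdl_llm_chatglm_{args.type.lower()}.bin"

    print(args.save_path)  # -> bigdl_llm_chatglm_q4_0.bin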