From 1cd9ab15b81c534796a8980cd9e72d90e025e4c1 Mon Sep 17 00:00:00 2001
From: binbin Deng <108676127+plusbang@users.noreply.github.com>
Date: Tue, 17 Oct 2023 11:52:56 +0800
Subject: [PATCH] LLM: fix ChatGLMConfig check (#9191)

---
 python/llm/src/bigdl/llm/transformers/convert.py | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/python/llm/src/bigdl/llm/transformers/convert.py b/python/llm/src/bigdl/llm/transformers/convert.py
index 6d03a55d..ee81b347 100644
--- a/python/llm/src/bigdl/llm/transformers/convert.py
+++ b/python/llm/src/bigdl/llm/transformers/convert.py
@@ -190,7 +190,8 @@ def optimize(model):
             convert_forward(model,
                             module.SelfAttention,
                             chatglm2_32k_attention_forward)
-        elif model.config.padded_vocab_size == 65024:
+        elif hasattr(model.config, 'padded_vocab_size') and \
+                model.config.padded_vocab_size == 65024:
             # chatglm2-6b
             modeling_module_name = model.__class__.__module__
             module = importlib.import_module(modeling_module_name)
@@ -203,7 +204,7 @@ def optimize(model):
             convert_forward(model,
                             module.CoreAttention,
                             core_attn_forward_8eb45c)
-        elif model.config.vocab_size == 130528:
+        elif hasattr(model.config, 'vocab_size') and model.config.vocab_size == 130528:
             # chatglm-6b
             modeling_module_name = model.__class__.__module__
             module = importlib.import_module(modeling_module_name)
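
Context on the fix (an explanatory sketch, not part of the patch): the two ChatGLMConfig
variants do not expose the same attributes, so the previously unguarded access to
model.config.padded_vocab_size could raise AttributeError on a config that only defines
vocab_size, before the later elif branch was ever evaluated. Below is a minimal,
self-contained illustration of the guarded dispatch pattern; the pick_optimization
helper and the SimpleNamespace stand-in configs are hypothetical, and only the attribute
names and vocab-size constants come from the patch itself.

    # sketch.py -- illustrates why each elif guards its attribute access with hasattr
    from types import SimpleNamespace

    def pick_optimization(config):
        """Dispatch on vocab size, guarding each config attribute access."""
        if hasattr(config, 'padded_vocab_size') and config.padded_vocab_size == 65024:
            return "chatglm2-6b path"
        elif hasattr(config, 'vocab_size') and config.vocab_size == 130528:
            return "chatglm-6b path"
        return "no optimization applied"

    # A chatglm2-style config defines padded_vocab_size; a chatglm-style one does not.
    chatglm2_config = SimpleNamespace(padded_vocab_size=65024)
    chatglm_config = SimpleNamespace(vocab_size=130528)

    print(pick_optimization(chatglm2_config))  # chatglm2-6b path
    print(pick_optimization(chatglm_config))   # chatglm-6b path, no AttributeError

An equivalent alternative would be getattr(config, 'padded_vocab_size', None) == 65024;
the hasattr guard chosen in the patch keeps each elif condition safe regardless of which
ChatGLM config variant is loaded, without touching the branch bodies.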