LLM: fix conversion of chatglm (#9190)
This commit is contained in:
parent
af3b575c7e
commit
77afb8796b
1 changed file with 1 addition and 1 deletion
|
|
@ -181,7 +181,7 @@ def optimize(model):
|
|||
# todo implement 4.28.0 ~ 4.30.2
|
||||
pass
|
||||
|
||||
if model.config.architectures[0] == "ChatGLMModel":
|
||||
if model.config.architectures is not None and model.config.architectures[0] == "ChatGLMModel":
|
||||
if model.config.num_layers == 28 and hasattr(model.config, 'rope_ratio'):
|
||||
# chatglm2-6b-32k
|
||||
modeling_module_name = model.__class__.__module__
|
||||
|
|
|
|||
Loading…
Reference in a new issue