LLM: Fix chatglm3-6b-32k error (#10719)
* fix chatglm3-6b-32k
* update style
parent 585c174e92
commit c9e6d42ad1
1 changed file with 2 additions and 1 deletion
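The previous check identified chatglm2-6b-32k by layer count plus the mere presence of a rope_ratio attribute. chatglm3-6b-32k also has 28 layers and exposes rope_ratio (with a different value), so it was evidently routed through the chatglm2-6b-32k path by mistake; checking the attribute's value (16 for chatglm2-6b-32k) restricts this branch to the intended model.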
@@ -889,7 +889,8 @@ def _optimize_post(model, lightweight_bmm=False):
     if model.config.architectures is not None \
             and model.config.architectures[0] in ["ChatGLMModel", "ChatGLMForConditionalGeneration"]:
-        if model.config.num_layers == 28 and hasattr(model.config, 'rope_ratio'):
+        if (model.config.num_layers == 28 and hasattr(model.config, 'rope_ratio')
+                and model.config.rope_ratio == 16):
             # chatglm2-6b-32k
             modeling_module_name = model.__class__.__module__
             module = importlib.import_module(modeling_module_name)
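A minimal sketch of the disambiguation the new condition performs; is_chatglm2_6b_32k is a hypothetical helper name (not part of the patch), and the config fields mirror the Hugging Face ChatGLM configs:

# Minimal sketch (not from the patch): a hypothetical helper showing why the
# rope_ratio value, not just its presence, must be checked.
def is_chatglm2_6b_32k(config):
    # chatglm3-6b-32k also has 28 layers and a rope_ratio attribute, so
    # testing hasattr alone misclassifies it; only chatglm2-6b-32k uses 16.
    return (getattr(config, 'num_layers', None) == 28
            and getattr(config, 'rope_ratio', None) == 16)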