LLM: fix error of 'AI-ModelScope/phi-2' hosted by ModelScope hub (#10364)
This commit is contained in:
parent fe27a6971c
commit dbcfc5c2fa
1 changed file with 3 additions and 1 deletion
@@ -1071,7 +1071,9 @@ def _optimize_post(model, lightweight_bmm=False):
         convert_forward(model,
                         module.MixtralBLockSparseTop2MLP,
                         mixtral_mlp_forward)
-    elif model.config.model_type == "phi-msft":
+    elif model.config.model_type == "phi-msft" and \
+            hasattr(model.config, "num_local_experts"):
+        # For phixtral, limit the condition to avoid applying on phi-2 hosted by ModelScope
         modeling_module_name = model.__class__.__module__
         module = importlib.import_module(modeling_module_name)
         from bigdl.llm.transformers.models.phixtral import phixtral_moeblock_forward, \
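For context, a minimal sketch (not part of this patch) of why the added hasattr check separates phixtral from plain phi-2: phixtral configs carry MoE fields such as num_local_experts, while the phi-2 checkpoint hosted on ModelScope also reports model_type "phi-msft" but has no such field, so the old condition wrongly routed it into the phixtral path. The helper and config values below are illustrative only.

```python
from types import SimpleNamespace

def should_apply_phixtral_opt(config) -> bool:
    # Mirrors the patched condition: require both the "phi-msft" model_type
    # and an MoE field that only phixtral-style configs define.
    return config.model_type == "phi-msft" and hasattr(config, "num_local_experts")

# Illustrative configs (hypothetical values, not loaded from any hub):
phixtral_cfg = SimpleNamespace(model_type="phi-msft", num_local_experts=4)
phi2_modelscope_cfg = SimpleNamespace(model_type="phi-msft")  # no MoE fields

assert should_apply_phixtral_opt(phixtral_cfg) is True       # phixtral path applies
assert should_apply_phixtral_opt(phi2_modelscope_cfg) is False  # phi-2 is left alone
```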