From 518ef95abce3652d3b5b9e72bf463a89440a01c9 Mon Sep 17 00:00:00 2001
From: Yuwen Hu <54161268+Oscilloscope98@users.noreply.github.com>
Date: Tue, 6 Feb 2024 14:58:52 +0800
Subject: [PATCH] Small fix for Nonetype error (#10104)

---
 python/llm/src/bigdl/llm/transformers/model.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/python/llm/src/bigdl/llm/transformers/model.py b/python/llm/src/bigdl/llm/transformers/model.py
index 8b6d6b2f..9c71b045 100644
--- a/python/llm/src/bigdl/llm/transformers/model.py
+++ b/python/llm/src/bigdl/llm/transformers/model.py
@@ -376,7 +376,8 @@ class _BaseAutoModelClass:
 
         # enable tie_word_embeddings for MPT
         # refer to https://huggingface.co/mosaicml/mpt-7b-chat/blob/main/modeling_mpt.py#L232
-        if model.config.architectures[0] != 'MPTForCausalLM':
+        if model.config.architectures is None \
+                or model.config.architectures[0] != 'MPTForCausalLM':
             model.config.update({"tie_word_embeddings": False})
 
         # add save_low_bit to pretrained model dynamically
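
Note: for context, a minimal sketch of the failure mode this patch guards against. Some
model configs do not declare an `architectures` field, in which case it loads as None and
the old subscript raised a TypeError. The SimpleNamespace below is a hypothetical stand-in
for model.config, not BigDL's actual config class:

    from types import SimpleNamespace

    # Hypothetical stand-in for model.config; a checkpoint without an
    # "architectures" entry yields None here.
    config = SimpleNamespace(architectures=None)

    # Old check: config.architectures[0] raised
    #   TypeError: 'NoneType' object is not subscriptable
    # Patched check: short-circuit on None before indexing.
    if config.architectures is None \
            or config.architectures[0] != 'MPTForCausalLM':
        print("tie_word_embeddings set to False")  # mirrors config.update(...)

Because `or` short-circuits, the index on the right-hand side is never evaluated when
`architectures` is None, so non-MPT models (and models with no declared architecture)
take the tie_word_embeddings=False path as before.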