diff --git a/python/llm/src/bigdl/llm/transformers/models/aquila.py b/python/llm/src/bigdl/llm/transformers/models/aquila.py
index 84abb6b8..66c891a6 100644
--- a/python/llm/src/bigdl/llm/transformers/models/aquila.py
+++ b/python/llm/src/bigdl/llm/transformers/models/aquila.py
@@ -44,7 +44,7 @@
 from torch import nn
 from bigdl.llm.transformers.models.utils import extend_kv_cache, init_kv_cache, append_kv_cache
 from bigdl.llm.transformers.models.utils import apply_rotary_pos_emb
-from bigdl.dllib.utils import log4Error
+from bigdl.llm.utils.common import log4Error

 KV_CACHE_ALLOC_BLOCK_LENGTH = 256