From b3e94a32d4fef18d22444827a58c2a9f20f4d8d6 Mon Sep 17 00:00:00 2001
From: Xin Qiu
Date: Sun, 8 Oct 2023 09:23:28 +0800
Subject: [PATCH] change log4error import (#9098)

---
 python/llm/src/bigdl/llm/transformers/models/aquila.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/python/llm/src/bigdl/llm/transformers/models/aquila.py b/python/llm/src/bigdl/llm/transformers/models/aquila.py
index 84abb6b8..66c891a6 100644
--- a/python/llm/src/bigdl/llm/transformers/models/aquila.py
+++ b/python/llm/src/bigdl/llm/transformers/models/aquila.py
@@ -44,7 +44,7 @@ from torch import nn
 from bigdl.llm.transformers.models.utils import extend_kv_cache, init_kv_cache, append_kv_cache
 from bigdl.llm.transformers.models.utils import apply_rotary_pos_emb
-from bigdl.dllib.utils import log4Error
+from bigdl.llm.utils.common import log4Error

 KV_CACHE_ALLOC_BLOCK_LENGTH = 256