Hot-fix SILU import error (#10292)

This commit is contained in:
SONG Ge 2024-03-01 10:11:37 +08:00 committed by GitHub
parent 90f2f82638
commit 273de341d7

View file

@@ -34,7 +34,7 @@ from bigdl.llm.transformers.models.utils import apply_rotary_pos_emb, \
 from bigdl.llm.transformers.models.utils import init_kv_cache, extend_kv_cache, append_kv_cache
 from bigdl.llm.transformers.models.utils import init_fp8_kv_cache, append_fp8_kv_cache, \
     restore_fp8_kv_cache, use_quantize_kv_cache
-from bigdl.llm.transformers.models.utils import is_enough_kv_cache_room_4_31. SILU
+from bigdl.llm.transformers.models.utils import is_enough_kv_cache_room_4_31, SILU
 from bigdl.llm.transformers.low_bit_linear import SYM_INT4, FP8E5
 KV_CACHE_ALLOC_BLOCK_LENGTH = 256