use sdp_causal to reduce internvl2-4b memory usage when the IPEX_LLM_LOW_MEM environment variable is set (#11953)

Yishuo Wang 2024-08-28 17:35:05 +08:00 committed by GitHub
parent 0a7bd274e2
commit 0fbb10259a


@@ -160,15 +160,17 @@ def attention_forward(
else:
attn_output = xe_addons.sdp(query_states, key_states, value_states,
attention_mask)
# disable sdp_causal to avoid overflow for now
# elif use_sdp_causal(q_len, kv_seq_len, self.head_dim, query_states, self.training):
# import xe_addons
# if isinstance(past_key_value, DynamicFp8Cache):
# attn_output = xe_addons.sdp_fp8_causal(query_states, key_states,
# value_states, attention_mask)
# else:
# attn_output = xe_addons.sdp_causal(query_states, key_states,
# value_states, attention_mask)
elif (
use_sdp_causal(q_len, kv_seq_len, self.head_dim, query_states, self.training)
and os.environ.get("IPEX_LLM_LOW_MEM", "0") == "1"
):
import xe_addons
if isinstance(past_key_value, DynamicFp8Cache):
attn_output = xe_addons.sdp_fp8_causal(query_states, key_states,
value_states, attention_mask)
else:
attn_output = xe_addons.sdp_causal(query_states, key_states,
value_states, attention_mask)
else:
if use_quantizekv:
key_states, value_states = restore_fp8_kv_cache(key_states, value_states,
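
Usage note (a minimal sketch, not part of this commit): the new branch only fires when the IPEX_LLM_LOW_MEM environment variable equals "1" before inference, so enabling it from Python could look like the lines below. The surrounding model-loading code is omitted and is the user's own; only the variable name comes from the diff above.

import os

# Must be set before attention_forward() runs, i.e. before inference starts.
os.environ["IPEX_LLM_LOW_MEM"] = "1"

# With this set, eligible causal prefill steps call xe_addons.sdp_causal
# (or xe_addons.sdp_fp8_causal when the FP8 KV cache is used) instead of
# the attention-mask sdp path shown above, reducing InternVL2-4B memory usage.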