Update modeling_phi.py
parent 39afec137e
commit accfee56d8
@@ -506,7 +506,7 @@ class PhiFlashAttention2(PhiAttention):
         value_states = value_states.to(target_dtype)
 
         attn_output = self._flash_attention_forward(
-            query_states, key_states, value_states, attention_mask, q_len, dropout=attn_dropout, softmax_scale=1.0
+            query_states, key_states, value_states, attention_mask, q_len, dropout=attn_dropout, softmax_scale=None
         )
 
         attn_output = attn_output.reshape(bsz, q_len, self.hidden_size).contiguous()
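For context on the one-line change above: in the flash-attn library, flash_attn_func (which _flash_attention_forward wraps) interprets softmax_scale=None as "use the default 1/sqrt(head_dim)", whereas a literal 1.0 skips the pre-softmax scaling entirely and so diverges from standard scaled dot-product attention. The following is a minimal, self-contained sketch of that semantic, not code from this repository; the helper naive_attention and the tensor shapes are illustrative assumptions.

# Minimal sketch (assumed helper, not the HF implementation) of what
# softmax_scale controls: None means "use 1/sqrt(head_dim)", while 1.0
# means "apply no scaling at all" -- the pre-fix behavior.
import math
import torch

def naive_attention(q, k, v, softmax_scale=None):
    # q, k, v: (batch, seq_len, num_heads, head_dim), matching flash-attn's layout
    if softmax_scale is None:
        softmax_scale = 1.0 / math.sqrt(q.shape[-1])  # flash-attn's documented default
    scores = torch.einsum("bqhd,bkhd->bhqk", q, k) * softmax_scale
    probs = scores.softmax(dim=-1)
    return torch.einsum("bhqk,bkhd->bqhd", probs, v)

q = k = v = torch.randn(1, 4, 2, 64)
out_default = naive_attention(q, k, v, softmax_scale=None)   # standard attention
out_unscaled = naive_attention(q, k, v, softmax_scale=1.0)   # pre-fix behavior
print(torch.allclose(out_default, out_unscaled))  # False: the outputs diverge

Running the sketch prints False, showing that softmax_scale=1.0 and the default produce different attention outputs; passing None lets flash-attn apply the standard 1/sqrt(head_dim) scaling, matching the non-flash code path.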