Update llama_attn_hijack.py (#4231)

Forkoz 2023-10-08 18:16:48 +00:00 committed by GitHub
parent 2e8b5f7c80
commit 2e471071af


@@ -17,6 +17,7 @@ if shared.args.xformers:
 def hijack_llama_attention():
+    import transformers.models.llama.modeling_llama
     if shared.args.xformers:
         transformers.models.llama.modeling_llama.LlamaAttention.forward = xformers_forward
         logger.info("Replaced attention with xformers_attention")
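For context, the change moves the transformers import inside hijack_llama_attention(), so the LLaMA modeling module is only loaded when the hijack is actually applied. Below is a minimal, stand-alone sketch of the same monkey-patch pattern; my_forward and _original_forward are illustrative names, not the webui's code, whereas the real replacement here is the xformers_forward function defined elsewhere in llama_attn_hijack.py.

import transformers.models.llama.modeling_llama as llama_modeling

# Keep a handle on the stock implementation so it can be restored or chained.
_original_forward = llama_modeling.LlamaAttention.forward

def my_forward(self, *args, **kwargs):
    # Illustrative replacement: a real hijack (e.g. xformers_forward above)
    # would recompute attention with memory-efficient kernels instead of
    # simply delegating to the original implementation.
    return _original_forward(self, *args, **kwargs)

# Patch the class attribute; every LlamaAttention instance picks this up.
llama_modeling.LlamaAttention.forward = my_forward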