From 5f418f61713f934ee5c426167bb4d6bb1d0042d0 Mon Sep 17 00:00:00 2001
From: oobabooga <112222186+oobabooga@users.noreply.github.com>
Date: Mon, 19 Jun 2023 01:19:28 -0300
Subject: [PATCH] Fix a memory leak (credits for the fix: Ph0rk0z)

---
 modules/exllama.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/modules/exllama.py b/modules/exllama.py
index 150a6e82..69eccbf2 100644
--- a/modules/exllama.py
+++ b/modules/exllama.py
@@ -48,7 +48,7 @@ class ExllamaModel:
         result.model = model
         result.cache = cache
         result.tokenizer = tokenizer
-        self.generator = generator
+        result.generator = generator
         return result, result
 
     def generate_with_streaming(self, prompt, state):