Mirror of https://github.com/oobabooga/text-generation-webui.git (synced 2024-11-29 02:49:30 +01:00)
Fix broken callbacks.py
commit d1327f99f9
parent 9bdb3c784d
@@ -4,8 +4,6 @@ from threading import Thread
 import torch
 import transformers
 
-from modules.text_generation import clear_torch_cache
-
 
 # Copied from https://github.com/PygmalionAI/gradio-ui/
 class _SentinelTokenStoppingCriteria(transformers.StoppingCriteria):
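The removed import is most likely what "broke" callbacks.py: in the upstream project, modules/text_generation.py itself imports from modules/callbacks.py, so pulling clear_torch_cache back out of text_generation closes an import cycle. (This reasoning is inferred from the diff; the commit message does not spell it out.) Below is a minimal, self-contained sketch of that failure mode, with made-up module names standing in for the two files; defining the helper locally, as the second hunk does, removes one edge of the cycle.

```python
"""Hypothetical reproduction of the import cycle this commit appears to break.

`textgen_demo` stands in for modules/text_generation.py and `callbacks_demo`
for modules/callbacks.py; both files below are invented for illustration.
"""
import sys
import tempfile
from pathlib import Path

demo_dir = Path(tempfile.mkdtemp())

# textgen_demo imports from callbacks_demo (as text_generation.py imports
# Iteratorize from callbacks.py upstream) ...
(demo_dir / "textgen_demo.py").write_text(
    "from callbacks_demo import Iteratorize\n"
    "def clear_torch_cache():\n"
    "    pass\n"
)

# ... and callbacks_demo imports back from textgen_demo, closing the cycle.
(demo_dir / "callbacks_demo.py").write_text(
    "from textgen_demo import clear_torch_cache\n"
    "class Iteratorize:\n"
    "    pass\n"
)

sys.path.insert(0, str(demo_dir))

try:
    import textgen_demo  # noqa: F401
except ImportError as exc:
    # e.g. "cannot import name 'clear_torch_cache' from partially
    # initialized module 'textgen_demo' (most likely due to a circular import)"
    print(f"Import cycle reproduced: {exc}")
```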
@@ -89,3 +87,8 @@ class Iteratorize:
     def __exit__(self, exc_type, exc_val, exc_tb):
         self.stop_now = True
         clear_torch_cache()
+
+def clear_torch_cache():
+    gc.collect()
+    if not shared.args.cpu:
+        torch.cuda.empty_cache()
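For context, here is a self-contained sketch of what the new helper does. In the real module, `shared.args.cpu` is assumed to be the webui's CPU-only launch flag; the sketch swaps it for an explicit parameter so it runs on its own, and it additionally guards on `torch.cuda.is_available()`, which the committed code does not do.

```python
import gc

import torch


def clear_torch_cache(cpu_only: bool = False) -> None:
    """Collect Python garbage, then release PyTorch's cached CUDA memory."""
    gc.collect()
    if not cpu_only and torch.cuda.is_available():
        # empty_cache() returns cached, unused GPU memory to the driver;
        # it does not free tensors that are still referenced.
        torch.cuda.empty_cache()


# Example: free memory after a generation pass finishes or is interrupted.
clear_torch_cache(cpu_only=not torch.cuda.is_available())
```

Calling it from Iteratorize.__exit__, as the hunk above does, means the cache is cleared whenever the generator wrapper is left, including when generation is stopped early.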