diff --git a/extensions/superbig/requirements.txt b/extensions/superbooga/requirements.txt
similarity index 100%
rename from extensions/superbig/requirements.txt
rename to extensions/superbooga/requirements.txt
diff --git a/extensions/superbig/script.py b/extensions/superbooga/script.py
similarity index 98%
rename from extensions/superbig/script.py
rename to extensions/superbooga/script.py
index dd264b24..0cc16d50 100644
--- a/extensions/superbig/script.py
+++ b/extensions/superbooga/script.py
@@ -143,7 +143,6 @@ def input_modifier(string):
 
     return string
 
-
 def ui():
     with gr.Accordion("Click for more information...", open=False):
         gr.Markdown(textwrap.dedent("""
@@ -155,6 +154,8 @@ def ui():
 
         The database is then queried during inference time to get the excerpts that are closest to your input. The idea is to create an arbitrarily large pseudocontext.
 
+        It is a modified version of the superbig extension by kaiokendev: https://github.com/kaiokendev/superbig
+
         ## How to use it
 
         1) Paste your input text (of whatever length) into the text box below.