mirror of https://github.com/oobabooga/text-generation-webui.git (synced 2024-11-21 15:48:04 +01:00)

Bump gradio to 4.23 (#5758)

parent 49b111e2dd
commit 2a92a842ce
@@ -89,6 +89,11 @@ div.svelte-15lo0d8 > *, div.svelte-15lo0d8 > .form > * {
   flex-wrap: nowrap;
 }

+gradio-app > :first-child {
+  padding-left: var(--size-4) !important;
+  padding-right: var(--size-4) !important;
+}
+
 .header_bar {
   background-color: #f7f7f7;
   box-shadow: 0 2px 3px rgba(22 22 22 / 35%);
@@ -119,7 +119,7 @@ def ui():
         samples_per_page=settings["gallery-items_per_page"]
     )

-    filter_box.change(lambda: None, None, None, _js=f'() => {{{custom_js()}; gotoFirstPage()}}').success(
+    filter_box.change(lambda: None, None, None, js=f'() => {{{custom_js()}; gotoFirstPage()}}').success(
        filter_cards, filter_box, gallery).then(
        lambda x: gr.update(elem_classes='highlighted-border' if x != '' else ''), filter_box, filter_box, show_progress=False)

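Most of the Python-side churn in this commit is one rename: Gradio 4 replaced the `_js` keyword on event listeners with `js`. A minimal before/after sketch (the component names are illustrative, not taken from the commit):

import gradio as gr

with gr.Blocks() as demo:
    btn = gr.Button("Notify")
    # Gradio 3.x style, no longer accepted by 4.x:
    #   btn.click(lambda: None, None, None, _js="() => alert('done')")
    # Gradio 4.x style, which the handlers throughout this diff switch to:
    btn.click(lambda: None, None, None, js="() => alert('done')")

demo.launch()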
@@ -64,7 +64,7 @@ def ui():

    audio.change(
        auto_transcribe, [audio, auto_submit, whipser_model, whipser_language], [shared.gradio['textbox'], audio]).then(
-        None, auto_submit, None, _js="(check) => {if (check) { document.getElementById('Generate').click() }}")
+        None, auto_submit, None, js="(check) => {if (check) { document.getElementById('Generate').click() }}")

    whipser_model.change(lambda x: params.update({"whipser_model": x}), whipser_model, None)
    whipser_language.change(lambda x: params.update({"whipser_language": x}), whipser_language, None)
@@ -32,27 +32,27 @@ function switch_to_chat() {
 }

 function switch_to_default() {
-  let default_tab_button = main_parent.childNodes[0].childNodes[4];
+  let default_tab_button = main_parent.childNodes[0].childNodes[5];
   default_tab_button.click();
   scrollToTop();
 }

 function switch_to_notebook() {
-  let notebook_tab_button = main_parent.childNodes[0].childNodes[7];
+  let notebook_tab_button = main_parent.childNodes[0].childNodes[9];
   notebook_tab_button.click();
   findButtonsByText("Raw")[1].click();
   scrollToTop();
 }

 function switch_to_generation_parameters() {
-  let parameters_tab_button = main_parent.childNodes[0].childNodes[10];
+  let parameters_tab_button = main_parent.childNodes[0].childNodes[13];
   parameters_tab_button.click();
   findButtonsByText("Generation")[0].click();
   scrollToTop();
 }

 function switch_to_character() {
-  let parameters_tab_button = main_parent.childNodes[0].childNodes[10];
+  let parameters_tab_button = main_parent.childNodes[0].childNodes[13];
   parameters_tab_button.click();
   findButtonsByText("Character")[0].click();
   scrollToTop();
@@ -43,8 +43,9 @@ def my_open(*args, **kwargs):
         with original_open(*args, **kwargs) as f:
             file_contents = f.read()

-        file_contents = file_contents.replace(b'\t\t<script\n\t\t\tsrc="https://cdnjs.cloudflare.com/ajax/libs/iframe-resizer/4.3.7/iframeResizer.contentWindow.min.js"\n\t\t\tasync\n\t\t></script>', b'')
+        file_contents = file_contents.replace(b'\t\t<script\n\t\t\tsrc="https://cdnjs.cloudflare.com/ajax/libs/iframe-resizer/4.3.9/iframeResizer.contentWindow.min.js"\n\t\t\tasync\n\t\t></script>', b'')
+        file_contents = file_contents.replace(b'cdnjs.cloudflare.com', b'127.0.0.1')

         return io.BytesIO(file_contents)
     else:
         return original_open(*args, **kwargs)
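For context: `my_open` only has an effect while `builtins.open` is monkey-patched, which server.py does below by wrapping `launch()` in `OpenMonkeyPatch()`. A rough sketch of that pattern, assuming the context manager simply swaps `open` in and out (the class name here is hypothetical, not the project's):

import builtins

original_open = builtins.open

class OpenPatchSketch:
    # Hypothetical stand-in for the OpenMonkeyPatch used in server.py.
    def __enter__(self):
        # While active, every open() call goes through my_open(), which strips the
        # cdnjs <script> tag from Gradio's index.html and redirects cdnjs.cloudflare.com
        # to 127.0.0.1; other files fall through to original_open().
        builtins.open = my_open  # my_open is the function patched in the hunk above

    def __exit__(self, exc_type, exc_value, traceback):
        builtins.open = original_open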
modules/gradio_hijack.py (new file, 72 lines)

@@ -0,0 +1,72 @@
+'''
+Copied from: https://github.com/AUTOMATIC1111/stable-diffusion-webui/pull/14184
+'''
+
+import inspect
+import warnings
+from functools import wraps
+
+import gradio as gr
+
+
+class GradioDeprecationWarning(DeprecationWarning):
+    pass
+
+
+def repair(grclass):
+    if not getattr(grclass, 'EVENTS', None):
+        return
+
+    @wraps(grclass.__init__)
+    def __repaired_init__(self, *args, tooltip=None, source=None, original=grclass.__init__, **kwargs):
+        if source:
+            kwargs["sources"] = [source]
+
+        allowed_kwargs = inspect.signature(original).parameters
+        fixed_kwargs = {}
+        for k, v in kwargs.items():
+            if k in allowed_kwargs:
+                fixed_kwargs[k] = v
+            else:
+                warnings.warn(f"unexpected argument for {grclass.__name__}: {k}", GradioDeprecationWarning, stacklevel=2)
+
+        original(self, *args, **fixed_kwargs)
+
+        self.webui_tooltip = tooltip
+
+        for event in self.EVENTS:
+            replaced_event = getattr(self, str(event))
+
+            def fun(*xargs, _js=None, replaced_event=replaced_event, **xkwargs):
+                if _js:
+                    xkwargs['js'] = _js
+
+                return replaced_event(*xargs, **xkwargs)
+
+            setattr(self, str(event), fun)
+
+    grclass.__init__ = __repaired_init__
+    grclass.update = gr.update
+
+
+for component in set(gr.components.__all__ + gr.layouts.__all__):
+    repair(getattr(gr, component, None))
+
+
+class Dependency(gr.events.Dependency):
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+
+        def then(*xargs, _js=None, **xkwargs):
+            if _js:
+                xkwargs['js'] = _js
+
+            return original_then(*xargs, **xkwargs)
+
+        original_then = self.then
+        self.then = then
+
+
+gr.events.Dependency = Dependency
+
+gr.Box = gr.Group
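A short usage sketch (not part of the commit) of what the shim buys: once `modules.gradio_hijack` is imported, code that still passes the Gradio 3 keyword `_js` keeps working, because `repair()` wraps every component event and forwards `_js` to Gradio 4's `js`:

import gradio as gr
from modules import gradio_hijack  # noqa: F401, importing the module applies the patches

with gr.Blocks() as demo:
    btn = gr.Button("Ping")
    # On stock Gradio 4 this raises "unexpected keyword argument '_js'";
    # with the hijack in place, the wrapper maps it to js=.
    btn.click(lambda: None, None, None, _js="() => alert('pong')")

demo.launch()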
@@ -46,7 +46,6 @@ settings = {
     'truncation_length_min': 0,
     'truncation_length_max': 200000,
     'max_tokens_second': 0,
-    'max_updates_second': 0,
     'prompt_lookup_num_tokens': 0,
     'custom_stopping_strings': '',
     'custom_token_bans': '',
@@ -81,19 +81,16 @@ def _generate_reply(question, state, stopping_strings=None, is_chat=False, escap
         state = copy.deepcopy(state)
         state['stream'] = True

-    min_update_interval = 0
-    if state.get('max_updates_second', 0) > 0:
-        min_update_interval = 1 / state['max_updates_second']
-
     # Generate
     for reply in generate_func(question, original_question, seed, state, stopping_strings, is_chat=is_chat):
         reply, stop_found = apply_stopping_strings(reply, all_stop_strings)
         if escape_html:
             reply = html.escape(reply)

         if is_stream:
             cur_time = time.time()

-            # Maximum number of tokens/second
+            # Limit number of tokens/second to make text readable in real time
             if state['max_tokens_second'] > 0:
                 diff = 1 / state['max_tokens_second'] - (cur_time - last_update)
                 if diff > 0:
@@ -101,13 +98,8 @@ def _generate_reply(question, state, stopping_strings=None, is_chat=False, escap

                 last_update = time.time()
                 yield reply
-
-            # Limit updates to avoid lag in the Gradio UI
-            # API updates are not limited
             else:
-                if cur_time - last_update > min_update_interval:
-                    last_update = cur_time
-                    yield reply
+                yield reply

         if stop_found or (state['max_tokens_second'] > 0 and shared.stop_everything):
             break
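A worked example (illustrative numbers only) of the tokens/second throttle that this cleanup keeps, using the `diff` formula from the hunk above:

# With max_tokens_second = 5, a token may be emitted at most every 1/5 = 0.2 s.
max_tokens_second = 5
last_update = 10.00   # time when the previous token was yielded
cur_time = 10.12      # time when the next token became available

diff = 1 / max_tokens_second - (cur_time - last_update)  # 0.2 - 0.12 = 0.08
if diff > 0:
    # the loop waits out the remaining 0.08 s before yielding,
    # capping the stream at roughly 5 tokens per second
    pass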
@@ -117,7 +117,6 @@ def list_interface_input_elements():
         'max_new_tokens',
         'auto_max_new_tokens',
         'max_tokens_second',
-        'max_updates_second',
         'prompt_lookup_num_tokens',
         'seed',
         'temperature',
@@ -181,7 +181,7 @@ def create_event_handlers():
        chat.generate_chat_reply_wrapper, gradio(inputs), gradio('display', 'history'), show_progress=False).then(
        ui.gather_interface_values, gradio(shared.input_elements), gradio('interface_state')).then(
        chat.save_history, gradio('history', 'unique_id', 'character_menu', 'mode'), None).then(
-        lambda: None, None, None, _js=f'() => {{{ui.audio_notification_js}}}')
+        lambda: None, None, None, js=f'() => {{{ui.audio_notification_js}}}')

    shared.gradio['textbox'].submit(
        ui.gather_interface_values, gradio(shared.input_elements), gradio('interface_state')).then(
@@ -189,28 +189,28 @@ def create_event_handlers():
        chat.generate_chat_reply_wrapper, gradio(inputs), gradio('display', 'history'), show_progress=False).then(
        ui.gather_interface_values, gradio(shared.input_elements), gradio('interface_state')).then(
        chat.save_history, gradio('history', 'unique_id', 'character_menu', 'mode'), None).then(
-        lambda: None, None, None, _js=f'() => {{{ui.audio_notification_js}}}')
+        lambda: None, None, None, js=f'() => {{{ui.audio_notification_js}}}')

    shared.gradio['Regenerate'].click(
        ui.gather_interface_values, gradio(shared.input_elements), gradio('interface_state')).then(
        partial(chat.generate_chat_reply_wrapper, regenerate=True), gradio(inputs), gradio('display', 'history'), show_progress=False).then(
        ui.gather_interface_values, gradio(shared.input_elements), gradio('interface_state')).then(
        chat.save_history, gradio('history', 'unique_id', 'character_menu', 'mode'), None).then(
-        lambda: None, None, None, _js=f'() => {{{ui.audio_notification_js}}}')
+        lambda: None, None, None, js=f'() => {{{ui.audio_notification_js}}}')

    shared.gradio['Continue'].click(
        ui.gather_interface_values, gradio(shared.input_elements), gradio('interface_state')).then(
        partial(chat.generate_chat_reply_wrapper, _continue=True), gradio(inputs), gradio('display', 'history'), show_progress=False).then(
        ui.gather_interface_values, gradio(shared.input_elements), gradio('interface_state')).then(
        chat.save_history, gradio('history', 'unique_id', 'character_menu', 'mode'), None).then(
-        lambda: None, None, None, _js=f'() => {{{ui.audio_notification_js}}}')
+        lambda: None, None, None, js=f'() => {{{ui.audio_notification_js}}}')

    shared.gradio['Impersonate'].click(
        ui.gather_interface_values, gradio(shared.input_elements), gradio('interface_state')).then(
        lambda x: x, gradio('textbox'), gradio('Chat input'), show_progress=False).then(
        chat.impersonate_wrapper, gradio(inputs), gradio('textbox', 'display'), show_progress=False).then(
        ui.gather_interface_values, gradio(shared.input_elements), gradio('interface_state')).then(
-        lambda: None, None, None, _js=f'() => {{{ui.audio_notification_js}}}')
+        lambda: None, None, None, js=f'() => {{{ui.audio_notification_js}}}')

    shared.gradio['Replace last reply'].click(
        ui.gather_interface_values, gradio(shared.input_elements), gradio('interface_state')).then(
@@ -288,7 +288,7 @@ def create_event_handlers():
        chat.redraw_html, gradio(reload_arr), gradio('display')).then(
        lambda x: gr.update(choices=(histories := chat.find_all_histories(x)), value=histories[0]), gradio('interface_state'), gradio('unique_id')).then(
        chat.save_history, gradio('history', 'unique_id', 'character_menu', 'mode'), None).then(
-        lambda: None, None, None, _js=f'() => {{{ui.switch_tabs_js}; switch_to_chat()}}')
+        lambda: None, None, None, js=f'() => {{{ui.switch_tabs_js}; switch_to_chat()}}')

    shared.gradio['character_menu'].change(
        chat.load_character, gradio('character_menu', 'name1', 'name2'), gradio('name1', 'name2', 'character_picture', 'greeting', 'context')).success(
@@ -296,7 +296,7 @@ def create_event_handlers():
        chat.load_latest_history, gradio('interface_state'), gradio('history')).then(
        chat.redraw_html, gradio(reload_arr), gradio('display')).then(
        lambda x: gr.update(choices=(histories := chat.find_all_histories(x)), value=histories[0]), gradio('interface_state'), gradio('unique_id')).then(
-        lambda: None, None, None, _js=f'() => {{{ui.update_big_picture_js}; updateBigPicture()}}')
+        lambda: None, None, None, js=f'() => {{{ui.update_big_picture_js}; updateBigPicture()}}')

    shared.gradio['mode'].change(
        lambda x: [gr.update(visible=x != 'instruct'), gr.update(visible=x == 'chat-instruct')], gradio('mode'), gradio('chat_style', 'chat-instruct_command'), show_progress=False).then(
@@ -332,15 +332,15 @@ def create_event_handlers():

    shared.gradio['save_chat_history'].click(
        lambda x: json.dumps(x, indent=4), gradio('history'), gradio('temporary_text')).then(
-        None, gradio('temporary_text', 'character_menu', 'mode'), None, _js=f'(hist, char, mode) => {{{ui.save_files_js}; saveHistory(hist, char, mode)}}')
+        None, gradio('temporary_text', 'character_menu', 'mode'), None, js=f'(hist, char, mode) => {{{ui.save_files_js}; saveHistory(hist, char, mode)}}')

    shared.gradio['Submit character'].click(
        chat.upload_character, gradio('upload_json', 'upload_img_bot'), gradio('character_menu')).then(
-        lambda: None, None, None, _js=f'() => {{{ui.switch_tabs_js}; switch_to_character()}}')
+        lambda: None, None, None, js=f'() => {{{ui.switch_tabs_js}; switch_to_character()}}')

    shared.gradio['Submit tavern character'].click(
        chat.upload_tavern_character, gradio('upload_img_tavern', 'tavern_json'), gradio('character_menu')).then(
-        lambda: None, None, None, _js=f'() => {{{ui.switch_tabs_js}; switch_to_character()}}')
+        lambda: None, None, None, js=f'() => {{{ui.switch_tabs_js}; switch_to_character()}}')

    shared.gradio['upload_json'].upload(lambda: gr.update(interactive=True), None, gradio('Submit character'))
    shared.gradio['upload_json'].clear(lambda: gr.update(interactive=False), None, gradio('Submit character'))
@@ -354,28 +354,28 @@ def create_event_handlers():
        ui.gather_interface_values, gradio(shared.input_elements), gradio('interface_state')).then(
        lambda x: x.update({'mode': 'instruct', 'history': {'internal': [], 'visible': []}}), gradio('interface_state'), None).then(
        partial(chat.generate_chat_prompt, 'Input'), gradio('interface_state'), gradio('textbox-default')).then(
-        lambda: None, None, None, _js=f'() => {{{ui.switch_tabs_js}; switch_to_default()}}')
+        lambda: None, None, None, js=f'() => {{{ui.switch_tabs_js}; switch_to_default()}}')

    shared.gradio['send_instruction_to_notebook'].click(
        ui.gather_interface_values, gradio(shared.input_elements), gradio('interface_state')).then(
        lambda x: x.update({'mode': 'instruct', 'history': {'internal': [], 'visible': []}}), gradio('interface_state'), None).then(
        partial(chat.generate_chat_prompt, 'Input'), gradio('interface_state'), gradio('textbox-notebook')).then(
-        lambda: None, None, None, _js=f'() => {{{ui.switch_tabs_js}; switch_to_notebook()}}')
+        lambda: None, None, None, js=f'() => {{{ui.switch_tabs_js}; switch_to_notebook()}}')

    shared.gradio['send_instruction_to_negative_prompt'].click(
        ui.gather_interface_values, gradio(shared.input_elements), gradio('interface_state')).then(
        lambda x: x.update({'mode': 'instruct', 'history': {'internal': [], 'visible': []}}), gradio('interface_state'), None).then(
        partial(chat.generate_chat_prompt, 'Input'), gradio('interface_state'), gradio('negative_prompt')).then(
-        lambda: None, None, None, _js=f'() => {{{ui.switch_tabs_js}; switch_to_generation_parameters()}}')
+        lambda: None, None, None, js=f'() => {{{ui.switch_tabs_js}; switch_to_generation_parameters()}}')

    shared.gradio['send-chat-to-default'].click(
        ui.gather_interface_values, gradio(shared.input_elements), gradio('interface_state')).then(
        partial(chat.generate_chat_prompt, '', _continue=True), gradio('interface_state'), gradio('textbox-default')).then(
-        lambda: None, None, None, _js=f'() => {{{ui.switch_tabs_js}; switch_to_default()}}')
+        lambda: None, None, None, js=f'() => {{{ui.switch_tabs_js}; switch_to_default()}}')

    shared.gradio['send-chat-to-notebook'].click(
        ui.gather_interface_values, gradio(shared.input_elements), gradio('interface_state')).then(
        partial(chat.generate_chat_prompt, '', _continue=True), gradio('interface_state'), gradio('textbox-notebook')).then(
-        lambda: None, None, None, _js=f'() => {{{ui.switch_tabs_js}; switch_to_notebook()}}')
+        lambda: None, None, None, js=f'() => {{{ui.switch_tabs_js}; switch_to_notebook()}}')

-    shared.gradio['show_controls'].change(None, gradio('show_controls'), None, _js=f'(x) => {{{ui.show_controls_js}; toggle_controls(x)}}')
+    shared.gradio['show_controls'].change(None, gradio('show_controls'), None, js=f'(x) => {{{ui.show_controls_js}; toggle_controls(x)}}')
@@ -67,21 +67,21 @@ def create_event_handlers():
        ui.gather_interface_values, gradio(shared.input_elements), gradio('interface_state')).then(
        generate_reply_wrapper, gradio(inputs), gradio(outputs), show_progress=False).then(
        ui.gather_interface_values, gradio(shared.input_elements), gradio('interface_state')).then(
-        lambda: None, None, None, _js=f'() => {{{ui.audio_notification_js}}}')
+        lambda: None, None, None, js=f'() => {{{ui.audio_notification_js}}}')

    shared.gradio['textbox-default'].submit(
        lambda x: x, gradio('textbox-default'), gradio('last_input-default')).then(
        ui.gather_interface_values, gradio(shared.input_elements), gradio('interface_state')).then(
        generate_reply_wrapper, gradio(inputs), gradio(outputs), show_progress=False).then(
        ui.gather_interface_values, gradio(shared.input_elements), gradio('interface_state')).then(
-        lambda: None, None, None, _js=f'() => {{{ui.audio_notification_js}}}')
+        lambda: None, None, None, js=f'() => {{{ui.audio_notification_js}}}')

    shared.gradio['markdown_render-default'].click(lambda x: x, gradio('output_textbox'), gradio('markdown-default'), queue=False)
    shared.gradio['Continue-default'].click(
        ui.gather_interface_values, gradio(shared.input_elements), gradio('interface_state')).then(
        generate_reply_wrapper, [shared.gradio['output_textbox']] + gradio(inputs)[1:], gradio(outputs), show_progress=False).then(
        ui.gather_interface_values, gradio(shared.input_elements), gradio('interface_state')).then(
-        lambda: None, None, None, _js=f'() => {{{ui.audio_notification_js}}}')
+        lambda: None, None, None, js=f'() => {{{ui.audio_notification_js}}}')

    shared.gradio['Stop-default'].click(stop_everything_event, None, None, queue=False)
    shared.gradio['prompt_menu-default'].change(load_prompt, gradio('prompt_menu-default'), gradio('textbox-default'), show_progress=False)
@@ -75,7 +75,7 @@ def create_ui():
            with gr.Row():
                with gr.Column():
                    shared.gradio['loader'] = gr.Dropdown(label="Model loader", choices=loaders.loaders_and_params.keys(), value=None)
-                    with gr.Box():
+                    with gr.Blocks():
                        with gr.Row():
                            with gr.Column():
                                with gr.Blocks():
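`gr.Box` no longer exists in Gradio 4; the hijack above aliases it away (`gr.Box = gr.Group`) and this hunk migrates the call site to another container. A minimal sketch of the replacement pattern (the content shown is illustrative):

import gradio as gr

with gr.Blocks() as demo:
    # Gradio 3.x:
    #   with gr.Box():
    #       gr.Markdown("boxed content")
    # Gradio 4.x: use gr.Group() (what the alias falls back to), or a nested layout
    # such as the gr.Blocks() used in this hunk:
    with gr.Group():
        gr.Markdown("grouped content")

demo.launch()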
@@ -67,14 +67,14 @@ def create_event_handlers():
        ui.gather_interface_values, gradio(shared.input_elements), gradio('interface_state')).then(
        generate_reply_wrapper, gradio(inputs), gradio(outputs), show_progress=False).then(
        ui.gather_interface_values, gradio(shared.input_elements), gradio('interface_state')).then(
-        lambda: None, None, None, _js=f'() => {{{ui.audio_notification_js}}}')
+        lambda: None, None, None, js=f'() => {{{ui.audio_notification_js}}}')

    shared.gradio['textbox-notebook'].submit(
        lambda x: x, gradio('textbox-notebook'), gradio('last_input-notebook')).then(
        ui.gather_interface_values, gradio(shared.input_elements), gradio('interface_state')).then(
        generate_reply_wrapper, gradio(inputs), gradio(outputs), show_progress=False).then(
        ui.gather_interface_values, gradio(shared.input_elements), gradio('interface_state')).then(
-        lambda: None, None, None, _js=f'() => {{{ui.audio_notification_js}}}')
+        lambda: None, None, None, js=f'() => {{{ui.audio_notification_js}}}')

    shared.gradio['Undo'].click(lambda x: x, gradio('last_input-notebook'), gradio('textbox-notebook'), show_progress=False)
    shared.gradio['markdown_render-notebook'].click(lambda x: x, gradio('textbox-notebook'), gradio('markdown-notebook'), queue=False)
@@ -83,7 +83,7 @@ def create_event_handlers():
        ui.gather_interface_values, gradio(shared.input_elements), gradio('interface_state')).then(
        generate_reply_wrapper, gradio(inputs), gradio(outputs), show_progress=False).then(
        ui.gather_interface_values, gradio(shared.input_elements), gradio('interface_state')).then(
-        lambda: None, None, None, _js=f'() => {{{ui.audio_notification_js}}}')
+        lambda: None, None, None, js=f'() => {{{ui.audio_notification_js}}}')

    shared.gradio['Stop-notebook'].click(stop_everything_event, None, None, queue=False)
    shared.gradio['prompt_menu-notebook'].change(load_prompt, gradio('prompt_menu-notebook'), gradio('textbox-notebook'), show_progress=False)
@@ -73,7 +73,6 @@ def create_ui(default_preset):
            with gr.Column():
                shared.gradio['truncation_length'] = gr.Slider(value=get_truncation_length(), minimum=shared.settings['truncation_length_min'], maximum=shared.settings['truncation_length_max'], step=256, label='Truncate the prompt up to this length', info='The leftmost tokens are removed if the prompt exceeds this length. Most models require this to be at most 2048.')
                shared.gradio['max_tokens_second'] = gr.Slider(value=shared.settings['max_tokens_second'], minimum=0, maximum=20, step=1, label='Maximum tokens/second', info='To make text readable in real time.')
-                shared.gradio['max_updates_second'] = gr.Slider(value=shared.settings['max_updates_second'], minimum=0, maximum=24, step=1, label='Maximum UI updates/second', info='Set this if you experience lag in the UI during streaming.')
                shared.gradio['prompt_lookup_num_tokens'] = gr.Slider(value=shared.settings['prompt_lookup_num_tokens'], minimum=0, maximum=10, step=1, label='prompt_lookup_num_tokens', info='Activates Prompt Lookup Decoding.')

                shared.gradio['custom_stopping_strings'] = gr.Textbox(lines=2, value=shared.settings["custom_stopping_strings"] or None, label='Custom stopping strings', info='In addition to the defaults. Written between "" and separated by commas.', placeholder='"\\n", "\\nYou:"')
@@ -32,10 +32,10 @@ def create_ui():
    # Reset interface event
    shared.gradio['reset_interface'].click(
        set_interface_arguments, gradio('extensions_menu', 'bool_menu'), None).then(
-        lambda: None, None, None, _js='() => {document.body.innerHTML=\'<h1 style="font-family:monospace;padding-top:20%;margin:0;height:100vh;color:lightgray;text-align:center;background:var(--body-background-fill)">Reloading...</h1>\'; setTimeout(function(){location.reload()},2500); return []}')
+        lambda: None, None, None, js='() => {document.body.innerHTML=\'<h1 style="font-family:monospace;padding-top:20%;margin:0;height:100vh;color:lightgray;text-align:center;background:var(--body-background-fill)">Reloading...</h1>\'; setTimeout(function(){location.reload()},2500); return []}')

    shared.gradio['toggle_dark_mode'].click(
-        lambda: None, None, None, _js='() => {document.getElementsByTagName("body")[0].classList.toggle("dark")}').then(
+        lambda: None, None, None, js='() => {document.getElementsByTagName("body")[0].classList.toggle("dark")}').then(
        lambda x: 'dark' if x == 'light' else 'light', gradio('theme_state'), gradio('theme_state'))

    shared.gradio['save_settings'].click(
@@ -4,7 +4,7 @@ bitsandbytes==0.43.*
 colorama
 datasets
 einops
-gradio==3.50.*
+gradio==4.23.*
 hqq==0.1.5
 jinja2==3.1.2
 lm_eval==0.3.0

@@ -2,7 +2,7 @@ accelerate==0.27.*
 colorama
 datasets
 einops
-gradio==3.50.*
+gradio==4.23.*
 hqq==0.1.5
 jinja2==3.1.2
 lm_eval==0.3.0

@@ -2,7 +2,7 @@ accelerate==0.27.*
 colorama
 datasets
 einops
-gradio==3.50.*
+gradio==4.23.*
 hqq==0.1.5
 jinja2==3.1.2
 lm_eval==0.3.0

@@ -2,7 +2,7 @@ accelerate==0.27.*
 colorama
 datasets
 einops
-gradio==3.50.*
+gradio==4.23.*
 hqq==0.1.5
 jinja2==3.1.2
 lm_eval==0.3.0

@@ -2,7 +2,7 @@ accelerate==0.27.*
 colorama
 datasets
 einops
-gradio==3.50.*
+gradio==4.23.*
 hqq==0.1.5
 jinja2==3.1.2
 lm_eval==0.3.0

@@ -2,7 +2,7 @@ accelerate==0.27.*
 colorama
 datasets
 einops
-gradio==3.50.*
+gradio==4.23.*
 hqq==0.1.5
 jinja2==3.1.2
 lm_eval==0.3.0

@@ -2,7 +2,7 @@ accelerate==0.27.*
 colorama
 datasets
 einops
-gradio==3.50.*
+gradio==4.23.*
 hqq==0.1.5
 jinja2==3.1.2
 lm_eval==0.3.0

@@ -4,7 +4,7 @@ bitsandbytes==0.43.*
 colorama
 datasets
 einops
-gradio==3.50.*
+gradio==4.23.*
 hqq==0.1.5
 jinja2==3.1.2
 lm_eval==0.3.0

@@ -2,7 +2,7 @@ accelerate==0.27.*
 colorama
 datasets
 einops
-gradio==3.50.*
+gradio==4.23.*
 hqq==0.1.5
 jinja2==3.1.2
 lm_eval==0.3.0
server.py (15 lines changed)
@@ -18,6 +18,7 @@ warnings.filterwarnings('ignore', category=UserWarning, message='The value passe
 warnings.filterwarnings('ignore', category=UserWarning, message='Field "model_names" has conflict')

 with RequestBlocker():
+    from modules import gradio_hijack
     import gradio as gr

 import matplotlib
@@ -145,11 +146,9 @@ def create_interface():
        ui_model_menu.create_event_handlers()

    # Interface launch events
-    if shared.settings['dark_theme']:
-        shared.gradio['interface'].load(lambda: None, None, None, _js="() => document.getElementsByTagName('body')[0].classList.add('dark')")
-
-    shared.gradio['interface'].load(lambda: None, None, None, _js=f"() => {{{js}}}")
-    shared.gradio['interface'].load(None, gradio('show_controls'), None, _js=f'(x) => {{{ui.show_controls_js}; toggle_controls(x)}}')
+    shared.gradio['interface'].load(lambda: None, None, None, js=f"() => {{if ({str(shared.settings['dark_theme']).lower()}) {{ document.getElementsByTagName('body')[0].classList.add('dark'); }} }}")
+    shared.gradio['interface'].load(lambda: None, None, None, js=f"() => {{{js}}}")
+    shared.gradio['interface'].load(lambda x: None, gradio('show_controls'), None, js=f'(x) => {{{ui.show_controls_js}; toggle_controls(x)}}')
    shared.gradio['interface'].load(partial(ui.apply_interface_values, {}, use_persistent=True), None, gradio(ui.list_interface_input_elements()), show_progress=False)
    shared.gradio['interface'].load(chat.redraw_html, gradio(ui_chat.reload_arr), gradio('display'))

@@ -157,9 +156,10 @@ def create_interface():
    extensions_module.create_extensions_block() # Extensions block

    # Launch the interface
-    shared.gradio['interface'].queue(concurrency_count=64)
+    shared.gradio['interface'].queue()
    with OpenMonkeyPatch():
        shared.gradio['interface'].launch(
+            max_threads=64,
            prevent_thread_lock=True,
            share=shared.args.share,
            server_name=None if not shared.args.listen else (shared.args.listen_host or '0.0.0.0'),
@@ -168,7 +168,8 @@ def create_interface():
            auth=auth or None,
            ssl_verify=False if (shared.args.ssl_keyfile or shared.args.ssl_certfile) else True,
            ssl_keyfile=shared.args.ssl_keyfile,
-            ssl_certfile=shared.args.ssl_certfile
+            ssl_certfile=shared.args.ssl_certfile,
+            allowed_paths=["."]
        )

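The queue and launch changes above follow the Gradio 4 API: `queue()` no longer accepts `concurrency_count`, while `launch()` here picks up `max_threads=64` and `allowed_paths=["."]`, since Gradio 4 only serves files from directories that are explicitly allow-listed. A condensed sketch (the `demo` object is illustrative):

import gradio as gr

with gr.Blocks() as demo:
    gr.Markdown("placeholder UI")

demo.queue()              # Gradio 3's queue(concurrency_count=64) keyword is gone in 4.x
demo.launch(
    max_threads=64,       # size of the thread pool that handles requests
    prevent_thread_lock=True,
    allowed_paths=["."],  # directories Gradio 4 may serve files from
)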
@@ -15,7 +15,6 @@ truncation_length: 2048
 truncation_length_min: 0
 truncation_length_max: 200000
 max_tokens_second: 0
-max_updates_second: 0
 prompt_lookup_num_tokens: 0
 custom_stopping_strings: ''
 custom_token_bans: ''