From a85ce5f055d2fdfec4155b49e59f3c9ee929300f Mon Sep 17 00:00:00 2001
From: oobabooga <112222186+oobabooga@users.noreply.github.com>
Date: Wed, 15 Nov 2023 16:13:36 -0800
Subject: [PATCH] Add more info messages for truncation / instruction template

---
 extensions/openai/models.py | 5 +++++
 extensions/openai/script.py | 2 +-
 modules/models.py           | 2 +-
 3 files changed, 7 insertions(+), 2 deletions(-)

diff --git a/extensions/openai/models.py b/extensions/openai/models.py
index c0a4606c..1ff950a2 100644
--- a/extensions/openai/models.py
+++ b/extensions/openai/models.py
@@ -1,4 +1,5 @@
 from modules import shared
+from modules.logging_colors import logger
 from modules.models import load_model, unload_model
 from modules.models_settings import get_model_metadata, update_model_parameters
 from modules.utils import get_available_models
@@ -62,3 +63,7 @@ def _load_model(data):
         for k in settings:
             if k in shared.settings:
                 shared.settings[k] = settings[k]
+                if k == 'truncation_length':
+                    logger.info(f"TRUNCATION LENGTH (UPDATED): {shared.settings['truncation_length']}")
+                elif k == 'instruction_template':
+                    logger.info(f"INSTRUCTION TEMPLATE (UPDATED): {shared.settings['instruction_template']}")
diff --git a/extensions/openai/script.py b/extensions/openai/script.py
index ec0ceeef..c7f61abe 100644
--- a/extensions/openai/script.py
+++ b/extensions/openai/script.py
@@ -287,7 +287,7 @@ async def handle_load_model(request_data: LoadModelRequest):
 
 
 @app.post("/v1/internal/model/unload")
-async def handle_load_model():
+async def handle_unload_model():
     unload_model()
     return JSONResponse(content="OK")
 
diff --git a/modules/models.py b/modules/models.py
index de21c8b3..e4c3ddaa 100644
--- a/modules/models.py
+++ b/modules/models.py
@@ -102,7 +102,7 @@ def load_model(model_name, loader=None):
     elif loader in ['llama.cpp', 'llamacpp_HF', 'ctransformers']:
         shared.settings['truncation_length'] = shared.args.n_ctx
 
-    logger.info(f"CONTEXT LENGTH: {shared.settings['truncation_length']}")
+    logger.info(f"TRUNCATION LENGTH: {shared.settings['truncation_length']}")
     logger.info(f"INSTRUCTION TEMPLATE: {shared.settings['instruction_template']}")
     logger.info(f"Loaded the model in {(time.time()-t0):.2f} seconds.")
     return model, tokenizer
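
To see the new "(UPDATED)" messages in practice, the sketch below posts a load request with custom generation settings. It is a minimal sketch, assuming the API extension is listening on the default port 5000, that the load route is POST /v1/internal/model/load, and that the request body carries "model_name" and "settings" keys (the exact schema lives in LoadModelRequest, which this patch only references); "my-model" is a placeholder for a real model folder name.

    import requests

    # Hypothetical payload; field names inferred from _load_model() above.
    payload = {
        "model_name": "my-model",
        "settings": {
            "truncation_length": 4096,         # expect: TRUNCATION LENGTH (UPDATED): 4096
            "instruction_template": "Alpaca",  # expect: INSTRUCTION TEMPLATE (UPDATED): Alpaca
        },
    }

    # Send the request and show the server's reply.
    response = requests.post("http://127.0.0.1:5000/v1/internal/model/load", json=payload)
    print(response.status_code, response.text)

While the model loads, the server console should also print the renamed TRUNCATION LENGTH and INSTRUCTION TEMPLATE lines from modules/models.py.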