From 8ee3cea7cb376478236499508ca884fa5fbba1fb Mon Sep 17 00:00:00 2001
From: oobabooga <112222186+oobabooga@users.noreply.github.com>
Date: Tue, 6 Feb 2024 06:31:27 -0800
Subject: [PATCH] Improve some log messages

---
 modules/models.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/modules/models.py b/modules/models.py
index 6c38c3c7..5929e868 100644
--- a/modules/models.py
+++ b/modules/models.py
@@ -100,9 +100,9 @@ def load_model(model_name, loader=None):
     elif loader in ['llama.cpp', 'llamacpp_HF', 'ctransformers']:
         shared.settings['truncation_length'] = shared.args.n_ctx
 
-    logger.info(f"LOADER: {loader}")
+    logger.info(f"LOADER: \"{loader}\"")
     logger.info(f"TRUNCATION LENGTH: {shared.settings['truncation_length']}")
-    logger.info(f"INSTRUCTION TEMPLATE: {metadata['instruction_template']}")
+    logger.info(f"INSTRUCTION TEMPLATE: \"{metadata['instruction_template']}\"")
     logger.info(f"Loaded the model in {(time.time()-t0):.2f} seconds.")
     return model, tokenizer
 