From b7dd1f95427d66b6ecb102058ba29f0d2726e780 Mon Sep 17 00:00:00 2001
From: B611 <35844889+B611@users.noreply.github.com>
Date: Sun, 31 Dec 2023 05:34:32 +0100
Subject: [PATCH] Specify utf-8 encoding for model metadata file open (#5125)

---
 modules/models_settings.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/modules/models_settings.py b/modules/models_settings.py
index 4e1fb1ad..919a15bc 100644
--- a/modules/models_settings.py
+++ b/modules/models_settings.py
@@ -35,7 +35,7 @@ def get_model_metadata(model):
 
     path = Path(f'{shared.args.model_dir}/{model}/config.json')
     if path.exists():
-        hf_metadata = json.loads(open(path, 'r').read())
+        hf_metadata = json.loads(open(path, 'r', encoding='utf-8').read())
     else:
         hf_metadata = None
 
@@ -78,7 +78,7 @@ def get_model_metadata(model):
     else:
         # Transformers metadata
        if hf_metadata is not None:
-            metadata = json.loads(open(path, 'r').read())
+            metadata = json.loads(open(path, 'r', encoding='utf-8').read())
             if 'max_position_embeddings' in metadata:
                 model_settings['truncation_length'] = metadata['max_position_embeddings']
                 model_settings['max_seq_len'] = metadata['max_position_embeddings']
@@ -101,7 +101,7 @@ def get_model_metadata(model):
     # Read AutoGPTQ metadata
     path = Path(f'{shared.args.model_dir}/{model}/quantize_config.json')
     if path.exists():
-        metadata = json.loads(open(path, 'r').read())
+        metadata = json.loads(open(path, 'r', encoding='utf-8').read())
         if 'bits' in metadata:
             model_settings['wbits'] = metadata['bits']
         if 'group_size' in metadata:
@@ -112,7 +112,7 @@
     # Try to find the Jinja instruct template
     path = Path(f'{shared.args.model_dir}/{model}') / 'tokenizer_config.json'
     if path.exists():
-        metadata = json.loads(open(path, 'r').read())
+        metadata = json.loads(open(path, 'r', encoding='utf-8').read())
         if 'chat_template' in metadata:
             template = metadata['chat_template']
             for k in ['eos_token', 'bos_token']:
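
For context, a minimal sketch (not part of the patch) of the pattern these hunks apply: without an explicit encoding, open() falls back to locale.getpreferredencoding(), which may not be UTF-8 (e.g. cp1252 on some Windows locales) and can fail on metadata files containing non-ASCII characters. The load_json_metadata helper and the path in the usage comment below are hypothetical.

import json
from pathlib import Path


def load_json_metadata(path: Path):
    """Hypothetical helper illustrating the fix: always decode JSON metadata as UTF-8.

    Without encoding='utf-8', open() uses the platform's preferred encoding, which
    can raise UnicodeDecodeError (or silently mis-decode) for files with non-ASCII
    content such as special tokens in tokenizer_config.json.
    """
    if not path.exists():
        return None

    # A context manager also guarantees the file handle is closed, which the
    # bare open(...).read() calls in the patched code do not.
    with open(path, 'r', encoding='utf-8') as f:
        return json.loads(f.read())


# Illustrative usage (the path is hypothetical):
# metadata = load_json_metadata(Path('models/some-model/tokenizer_config.json'))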