Mirror of https://github.com/ggerganov/llama.cpp.git
lora : warn user if new token is added in the adapter (#9948)
This commit is contained in:
parent 4ff7fe1fb3
commit c421ac072d
@@ -348,6 +348,9 @@ if __name__ == '__main__':
                     if ".base_layer.weight" in name:
                         continue
                     logger.error(f"Unexpected name '{name}': Not a lora_A or lora_B tensor")
+                    if ".embed_tokens.weight" in name or ".lm_head.weight" in name:
+                        logger.error("Embeddings is present in the adapter. This can be due to new tokens added during fine tuning")
+                        logger.error("Hint: if you are using TRL, make sure not to call setup_chat_format()")
                     sys.exit(1)
 
                 if base_name in tensor_map:
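
For readers who hit the new error: the TRL scenario referenced in the hint can be reproduced roughly as below. This is a minimal sketch and not part of the commit; the checkpoint name is a placeholder. trl's setup_chat_format() adds chat special tokens and resizes the token embeddings, so a LoRA adapter trained afterwards ends up carrying .embed_tokens.weight / .lm_head.weight tensors, which the converter above now detects and refuses to convert.

# Minimal sketch (not from the commit) of how setup_chat_format() introduces
# the embedding tensors that trigger the new error.
from transformers import AutoModelForCausalLM, AutoTokenizer
from trl import setup_chat_format

model = AutoModelForCausalLM.from_pretrained("my-base-model")   # placeholder checkpoint
tokenizer = AutoTokenizer.from_pretrained("my-base-model")      # placeholder checkpoint

before = model.get_input_embeddings().weight.shape[0]

# Adds chat special tokens and resizes the embedding matrix; an adapter
# fine-tuned after this call will contain .embed_tokens.weight / .lm_head.weight.
model, tokenizer = setup_chat_format(model, tokenizer)

after = model.get_input_embeddings().weight.shape[0]
print(f"vocab size: {before} -> {after}")  # grows, so embeddings get saved with the adapter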