fix error from prepare call running twice in a row

Alex "mcmonkey" Goodwin 2023-03-25 12:37:32 -07:00
parent 7bf601107c
commit 5c49a0dcd0


@@ -90,7 +90,8 @@ def do_train(loraName: str, microBatchSize: int, batchSize: int, epochs: int, le
         evalData = load_dataset("json", data_files=cleanPath('training/datasets', f'{evalDataset}.json'))
         evalData = evalData['train'].shuffle().map(generate_and_tokenize_prompt)
     # Start prepping the model itself
-    model = prepare_model_for_int8_training(model)
+    if not hasattr(model, 'lm_head') or hasattr(model.lm_head, 'weight'):
+        model = prepare_model_for_int8_training(model)
     config = LoraConfig(
         r=loraRank,
         lora_alpha=loraAlpha,
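
The added condition makes the int8 preparation step idempotent, so re-running training on an already-loaded model no longer calls prepare twice. Below is a minimal standalone sketch of the same guard, under the assumption (implied by the diff, not stated in it) that peft's prepare_model_for_int8_training replaces model.lm_head with a wrapper module that no longer exposes a .weight attribute, so a missing .weight can be read as "already prepared". The helper name prepare_int8_once is hypothetical.

    # Sketch only: assumes a prepared model's lm_head has been wrapped and
    # therefore has no .weight attribute, matching the check in the diff above.
    from peft import prepare_model_for_int8_training

    def prepare_int8_once(model):
        """Run int8 training preparation only if it has not already run."""
        already_prepared = hasattr(model, 'lm_head') and not hasattr(model.lm_head, 'weight')
        if not already_prepared:
            model = prepare_model_for_int8_training(model)
        return model

The condition inside prepare_int8_once is the logical negation of the guard in the diff, so both run the preparation in exactly the same cases.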