Fix "Training fails when evaluation dataset is selected" (#2099)

Fixes https://github.com/oobabooga/text-generation-webui/issues/2078, reported by Googulator.
commit d205ec9706
parent 428261eede
@@ -380,10 +380,10 @@ def do_train(lora_name: str, always_override: bool, save_steps: int, micro_batch
         logging_steps=5,
         evaluation_strategy="steps" if eval_data is not None else "no",
         eval_steps=math.ceil(eval_steps / gradient_accumulation_steps) if eval_data is not None else None,
-        save_strategy="no",
+        save_strategy="steps" if eval_data is not None else "no",
         output_dir=lora_file_path,
         lr_scheduler_type=lr_scheduler_type,
-        load_best_model_at_end=True if eval_data is not None else False,
+        load_best_model_at_end=eval_data is not None,
         # TODO: Enable multi-device support
         ddp_find_unused_parameters=None,
         no_cuda=shared.args.cpu
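For context, a minimal sketch of why the old arguments failed (assuming the Hugging Face transformers API; eval_data and the literals below are stand-ins, not the webui's actual values): TrainingArguments rejects load_best_model_at_end=True unless the save strategy matches the evaluation strategy, so save_strategy="no" combined with evaluation_strategy="steps" raises a ValueError before training even starts.

# Sketch only, not part of the commit. Assumes transformers' TrainingArguments;
# eval_data stands in for the evaluation dataset selected in the UI.
from transformers import TrainingArguments

eval_data = object()  # placeholder: an evaluation dataset was selected

# Pre-fix combination: evaluation runs every N steps but nothing is ever
# saved, so there is no checkpoint for the "best model" to be loaded from;
# transformers rejects this with a ValueError at construction time.
try:
    TrainingArguments(
        output_dir="lora-out",
        evaluation_strategy="steps",
        save_strategy="no",
        load_best_model_at_end=True,
    )
except ValueError as err:
    print(err)  # save and eval strategy must match

# Post-fix combination: saving follows evaluation, so checkpoints exist
# and the best one can be restored at the end of training.
args = TrainingArguments(
    output_dir="lora-out",
    evaluation_strategy="steps" if eval_data is not None else "no",
    save_strategy="steps" if eval_data is not None else "no",
    load_best_model_at_end=eval_data is not None,
)

The load_best_model_at_end change in the diff is a pure simplification: "True if X else False" is just X.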