Style changes

This commit is contained in:
oobabooga 2023-07-11 18:49:06 -07:00
parent bfafd07f44
commit e3810dff40

View File

@ -1,19 +1,17 @@
import json
import math
import random
import shutil
import sys
import threading
import time
import traceback
from datetime import datetime
from pathlib import Path

import gradio as gr
import torch
import transformers
import shutil
from datetime import datetime
from datasets import Dataset, load_dataset
from peft import (
    LoraConfig,
@ -240,6 +238,7 @@ def backup_adapter(input_folder):
    except Exception as e:
        print("An error occurred in backup_adapter:", str(e))

def calc_trainable_parameters(model):
    trainable_params = 0
    all_param = 0
@ -563,7 +562,6 @@ def do_train(lora_name: str, always_override: bool, save_steps: int, micro_batch
    if lora_all_param > 0:
        print(f"Trainable params: {lora_trainable_param:,d} ({100 * lora_trainable_param / lora_all_param:.4f} %), All params: {lora_all_param:,d} (Model: {model_all_params:,d})")

    train_log.update({"base_model_name": shared.model_name})
    train_log.update({"base_model_class": shared.model.__class__.__name__})
    train_log.update({"base_loaded_in_4bit": getattr(lora_model, "is_loaded_in_4bit", False)})