Increase the sequence length, decrease batch size

I have no idea what I am doing
oobabooga 2023-03-03 15:54:13 -03:00
parent e62b9b1074
commit 5a79863df3

@@ -70,7 +70,7 @@ class LLaMAModel:
         pass
 
     @classmethod
-    def from_pretrained(self, path, max_seq_len=512, max_batch_size=32):
+    def from_pretrained(self, path, max_seq_len=2048, max_batch_size=1):
         tokenizer_path = path / "tokenizer.model"
         path = os.path.abspath(path)
         tokenizer_path = os.path.abspath(tokenizer_path)
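
The new defaults let a single prompt use LLaMA's full 2048-token context window while only reserving cache room for one sequence at a time instead of 32, which in the upstream LLaMA reference code shrinks the preallocated attention cache. A minimal usage sketch, assuming LLaMAModel is imported from this module (import path assumed) and that the model directory is passed as a pathlib.Path, as the path / "tokenizer.model" line implies; the return value is not shown in this diff:

    from pathlib import Path

    from modules.LLaMA import LLaMAModel  # assumed import path within the repo

    model_dir = Path("models/LLaMA-7B")   # hypothetical model directory

    # Uses the new defaults: max_seq_len=2048, max_batch_size=1
    model = LLaMAModel.from_pretrained(model_dir)

    # The previous defaults can still be requested explicitly if needed
    model = LLaMAModel.from_pretrained(model_dir, max_seq_len=512, max_batch_size=32)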