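# Unit tests for the llama.cpp server's /tokenize and /detokenize endpoints,
# exercised with pytest against the tinyllama2 server preset.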
import pytest

from utils import *

server = ServerPreset.tinyllama2()


@pytest.fixture(scope="module", autouse=True)
def create_server():
    global server
    server = ServerPreset.tinyllama2()

def test_tokenize_detokenize():
    global server
    server.start()
    # tokenize
    content = "What is the capital of France ?"
    res_tok = server.make_request("POST", "/tokenize", data={
        "content": content
    })
    assert res_tok.status_code == 200
    assert len(res_tok.body["tokens"]) > 5
    # detokenize
    res_detok = server.make_request("POST", "/detokenize", data={
        "tokens": res_tok.body["tokens"],
    })
    assert res_detok.status_code == 200
    assert res_detok.body["content"].strip() == content

def test_tokenize_with_bos():
    global server
    server.start()
    # tokenize
    content = "What is the capital of France ?"
    bosId = 1
    res_tok = server.make_request("POST", "/tokenize", data={
        "content": content,
        "add_special": True,
    })
    assert res_tok.status_code == 200
    assert res_tok.body["tokens"][0] == bosId

def test_tokenize_with_pieces():
    global server
    server.start()
    # tokenize
    content = "This is a test string with unicode 媽 and emoji 🤗"
    res_tok = server.make_request("POST", "/tokenize", data={
        "content": content,
        "with_pieces": True,
    })
    assert res_tok.status_code == 200
    for token in res_tok.body["tokens"]:
        assert "id" in token
        assert token["id"] > 0
        assert "piece" in token
        assert len(token["piece"]) > 0
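

# ---------------------------------------------------------------------------
# For reference, a minimal standalone sketch of the raw HTTP round trip that
# the tests above exercise, using only the standard library. The host/port
# are an assumption for illustration; in the tests the ServerPreset fixture
# manages its own server instance. The guard below keeps this out of pytest
# collection.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    import json
    import urllib.request

    base_url = "http://localhost:8080"  # assumed address of a running server

    def post(path, payload):
        # send a JSON POST request and decode the JSON response body
        req = urllib.request.Request(
            base_url + path,
            data=json.dumps(payload).encode("utf-8"),
            headers={"Content-Type": "application/json"},
        )
        with urllib.request.urlopen(req) as res:
            return json.loads(res.read())

    # same payloads as test_tokenize_detokenize above
    tok = post("/tokenize", {"content": "What is the capital of France ?"})
    print("tokens:", tok["tokens"])
    detok = post("/detokenize", {"tokens": tok["tokens"]})
    print("content:", detok["content"])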