Mirror of https://github.com/ggerganov/llama.cpp.git (synced 2025-02-04 23:52:32 +01:00)
Flush stdout in chat template before potential crash

commit 33322e823e
parent e63520f37a
@@ -291,6 +291,7 @@ int main(void) {
             printf("Expected:\n%s\n", test_case.expected_output.c_str());
             printf("-------------------------\n");
             printf("Actual:\n%s\n", output.c_str());
+            fflush(stdout);
             assert(output == test_case.expected_output);
         }
     }
@@ -315,6 +316,7 @@ int main(void) {
             printf("Expected:\n%s\n", expected_output.c_str());
             printf("-------------------------\n");
             printf("Actual:\n%s\n", output.c_str());
+            fflush(stdout);
             assert(output == expected_output);
         }
     } catch (const std::exception & e) {
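For context, a minimal standalone sketch of the failure mode this change guards against (not part of the commit; the strings and variable names here are illustrative). A failing assert() calls abort(), which is not guaranteed to flush stdio buffers, so when stdout is fully buffered (e.g. redirected to a file or a CI log) the diagnostics printed just before the crash can be lost unless they are flushed first:

    // sketch: flush diagnostics before an assert that may abort the process
    #include <cassert>
    #include <cstdio>
    #include <string>

    int main() {
        std::string expected = "hello";
        std::string actual   = "hullo"; // hypothetical mismatch to trigger the assert

        printf("Expected:\n%s\n", expected.c_str());
        printf("-------------------------\n");
        printf("Actual:\n%s\n", actual.c_str());
        fflush(stdout);             // ensure the output reaches the log before abort()

        assert(actual == expected); // aborts here; without the flush the text above may never appear
        return 0;
    }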