llama : prevent system info string accumulation across calls (#11101)

Asghar Ghorbani 2025-01-06 12:21:46 +01:00 committed by GitHub
parent 6369f867a4
commit 96a1dc27c3


@@ -12458,6 +12458,8 @@ int llama_split_prefix(char * dest, size_t maxlen, const char * split_path, int
 const char * llama_print_system_info(void) {
     static std::string s;
+    s.clear(); // Clear the string, since it's static, otherwise it will accumulate data from previous calls.
+
     for (size_t i = 0; i < ggml_backend_reg_count(); i++) {
         auto * reg = ggml_backend_reg_get(i);
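
The pattern being fixed: llama_print_system_info() builds its result in a function-local static std::string so the returned const char * stays valid after the function returns, but without resetting the buffer each call keeps appending to the text left over from the previous call. Below is a minimal standalone sketch of that pattern (hypothetical names, not llama.cpp code) showing why the buffer grows and how the added s.clear() prevents it.

#include <cstdio>
#include <string>

// Hypothetical example: builds its result in a static std::string so the
// returned pointer remains valid after the function returns. Without the
// clear(), every call would append to the text produced by earlier calls.
static const char * build_info() {
    static std::string s;
    s.clear(); // reset the static buffer so repeated calls don't accumulate
    s += "FEATURE_A = 1 | ";
    s += "FEATURE_B = 0 | ";
    return s.c_str(); // valid until the next call to build_info()
}

int main() {
    printf("%s\n", build_info()); // "FEATURE_A = 1 | FEATURE_B = 0 | "
    printf("%s\n", build_info()); // same output again, thanks to s.clear()
    return 0;
}

Removing the s.clear() line in this sketch makes the second call print the feature list twice, which is the accumulation the commit prevents in llama_print_system_info().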