|
|
|
#pragma once
|
|
|
|
|
|
|
|
#define LLAMA_API_INTERNAL
|
|
|
|
#include "llama.h"
|
|
|
|
|
|
|
|
// LLAMA_ATTRIBUTE_FORMAT(fmt_idx, args_idx): marks a variadic function so the
// compiler can type-check its printf-style format string (parameter fmt_idx)
// against the variadic arguments (starting at parameter args_idx).
#ifdef __GNUC__
|
|
|
|
// On MinGW, GCC's default `printf` archetype follows the MS C runtime rules;
// `gnu_printf` forces checking against the GNU/C99 conventions instead
// (e.g. %zu), matching how the format strings are actually written.
#ifdef __MINGW32__
|
|
|
|
#define LLAMA_ATTRIBUTE_FORMAT(...) __attribute__((format(gnu_printf, __VA_ARGS__)))
|
|
|
|
#else
|
|
|
|
#define LLAMA_ATTRIBUTE_FORMAT(...) __attribute__((format(printf, __VA_ARGS__)))
|
|
|
|
#endif
|
|
|
|
#else
|
|
|
|
// Non-GCC-compatible compilers: expand to nothing — no format checking.
#define LLAMA_ATTRIBUTE_FORMAT(...)
|
|
|
|
#endif
|
|
|
|
|
|
|
|
//
|
|
|
|
// logging
|
|
|
|
//
|
|
|
|
|
|
|
|
// Core printf-style logging entry point; `format`/varargs are checked by the
// compiler via the format attribute (format string is arg 2, varargs start at 3).
LLAMA_ATTRIBUTE_FORMAT(2, 3)
|
|
|
|
void llama_log_internal        (ggml_log_level level, const char * format, ...);
|
|
|
|
// Default log callback, matching ggml's log-callback signature.
// NOTE(review): implementation not visible here — presumably writes `text`
// to stderr and ignores `user_data`; confirm in the corresponding .cpp.
void llama_log_callback_default(ggml_log_level level, const char * text, void * user_data);
|
|
|
|
|
|
|
|
// Convenience wrappers: printf-style logging tagged with a fixed severity.
#define LLAMA_LOG_INFO(...)  llama_log_internal(GGML_LOG_LEVEL_INFO , __VA_ARGS__)
|
|
|
|
#define LLAMA_LOG_WARN(...)  llama_log_internal(GGML_LOG_LEVEL_WARN , __VA_ARGS__)
|
|
|
|
#define LLAMA_LOG_ERROR(...) llama_log_internal(GGML_LOG_LEVEL_ERROR, __VA_ARGS__)
|
|
|
|
|
|
|
|
//
|
|
|
|
// helpers
|
|
|
|
//
|
|
|
|
|
|
|
|
// Replace every occurrence of `search` in `s` with `replace`, in place.
// Single left-to-right pass into a scratch buffer, so replacements are not
// re-scanned (replacing "a" with "aa" terminates). An empty `search` leaves
// `s` untouched.
static void replace_all(std::string & s, const std::string & search, const std::string & replace) {
    // Guard: find("") matches at every position, which would loop forever.
    if (search.empty()) {
        return;
    }

    std::string result;
    result.reserve(s.length());

    size_t prev = 0; // end of the last match (start of the next unscanned chunk)
    for (size_t hit = s.find(search); hit != std::string::npos; hit = s.find(search, prev)) {
        // Copy the untouched text before the match, then the replacement.
        result.append(s, prev, hit - prev);
        result += replace;
        prev = hit + search.length();
    }

    // Tail after the final match (or the whole string if nothing matched).
    result.append(s, prev, std::string::npos);

    s = std::move(result);
}
|