Mirror of https://github.com/ggerganov/llama.cpp.git, synced 2024-12-26 22:30:32 +01:00
examples: fix android example cannot be generated continuously (#8621)
When generation ends, `completion_loop()` should return null rather than an empty string.
parent 45f2c19cc5
commit b7c11d36e6
@@ -409,7 +409,7 @@ Java_android_llama_cpp_LLamaAndroid_completion_1loop(
 
     const auto n_cur = env->CallIntMethod(intvar_ncur, la_int_var_value);
     if (llama_token_is_eog(model, new_token_id) || n_cur == n_len) {
-        return env->NewStringUTF("");
+        return nullptr;
     }
 
     auto new_token_chars = llama_token_to_piece(context, new_token_id);
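For context, here is a minimal sketch (not the actual example code) of how the Kotlin side of the Android example might consume this return value; the function name `completionLoop`, its signature, and the flow-based loop are illustrative assumptions. With the fix, the null return terminates the loop, whereas the old empty-string return would have kept emitting empty pieces indefinitely.

import kotlinx.coroutines.flow.Flow
import kotlinx.coroutines.flow.flow

// Hypothetical stand-in for the JNI binding of the function patched above
// (the real example declares it as `external`); it returns the next decoded
// piece, or null once generation has ended.
fun completionLoop(context: Long, batch: Long, nLen: Int, nCur: IntArray): String? = TODO()

// Consume pieces until the native side signals end of generation with null.
fun generate(context: Long, batch: Long, nLen: Int, nCur: IntArray): Flow<String> = flow {
    while (true) {
        val piece = completionLoop(context, batch, nLen, nCur) ?: break
        emit(piece)
    }
}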