From b7c11d36e605b35206901d0e21905f1b99508e33 Mon Sep 17 00:00:00 2001
From: devojony <61173062+devojony@users.noreply.github.com>
Date: Mon, 22 Jul 2024 14:54:42 +0800
Subject: [PATCH] examples: fix android example cannot be generated continuously (#8621)

When generation ends, `completion_loop()` should return NULL rather than
an empty string.

---
 examples/llama.android/llama/src/main/cpp/llama-android.cpp | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/examples/llama.android/llama/src/main/cpp/llama-android.cpp b/examples/llama.android/llama/src/main/cpp/llama-android.cpp
index 92a6b16b1..2aafe2316 100644
--- a/examples/llama.android/llama/src/main/cpp/llama-android.cpp
+++ b/examples/llama.android/llama/src/main/cpp/llama-android.cpp
@@ -409,7 +409,7 @@ Java_android_llama_cpp_LLamaAndroid_completion_1loop(
     const auto n_cur = env->CallIntMethod(intvar_ncur, la_int_var_value);
     if (llama_token_is_eog(model, new_token_id) || n_cur == n_len) {
-        return env->NewStringUTF("");
+        return nullptr;
     }
 
     auto new_token_chars = llama_token_to_piece(context, new_token_id);
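
For context, a minimal Kotlin sketch of the caller side of this JNI function follows. The binding name `completionLoop`, the `IntVar` holder, and the `generate()` helper are illustrative assumptions, not code from the patch or from the actual LLamaAndroid.kt; they are meant only to show why the native side must return NULL (mapped to Kotlin `null`) at end of generation: the caller's read loop can only stop, and a new completion can only start, if "generation finished" is distinguishable from a token that happens to decode to an empty piece.

// Hypothetical caller-side sketch; names and signatures are assumptions.
// The external binding is assumed to mirror completion_1loop() in
// llama-android.cpp and to return the next decoded piece, or null once
// generation has ended. System.loadLibrary(...) is assumed to have run.
class IntVar(var value: Int)

private external fun completionLoop(context: Long, batch: Long, nLen: Int, nCur: IntVar): String?

fun generate(context: Long, batch: Long, nLen: Int, nCur: IntVar, onPiece: (String) -> Unit) {
    while (nCur.value <= nLen) {
        // A null return signals end-of-generation. The previous empty-string
        // return was indistinguishable from a token that decodes to no visible
        // text, so the loop could not reliably terminate and the caller could
        // never start the next completion.
        val piece = completionLoop(context, batch, nLen, nCur) ?: break
        onPiece(piece)
    }
}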