mirror of
https://github.com/ggerganov/llama.cpp.git
synced 2024-12-25 05:48:47 +01:00
llama : fix typical sampling (#4261)
Typical sampling was broken because after copying new_candidates into candidates, the "sorted" bool is left at "true", but the new data is no longer sorted according to probability. Patch to set "sorted" to false. Test: Generating with temp=0.0001 (approx. argmax) should generate the same sequence at typical>=1.0 and typical=0.9999 (approx. disabled, but enters the typical sampling codepath).
This commit is contained in:
parent
e2bd725f4b
commit
954e22858c
@ -7027,6 +7027,7 @@ void llama_sample_typical(struct llama_context * ctx, llama_token_data_array * c
|
|||||||
// Replace the data in candidates with the new_candidates data
|
// Replace the data in candidates with the new_candidates data
|
||||||
std::copy(new_candidates.begin(), new_candidates.end(), candidates->data);
|
std::copy(new_candidates.begin(), new_candidates.end(), candidates->data);
|
||||||
candidates->size = new_candidates.size();
|
candidates->size = new_candidates.size();
|
||||||
|
candidates->sorted = false;
|
||||||
|
|
||||||
if (ctx) {
|
if (ctx) {
|
||||||
ctx->t_sample_us += ggml_time_us() - t_start_sample_us;
|
ctx->t_sample_us += ggml_time_us() - t_start_sample_us;
|
||||||
|
Loading…
Reference in New Issue
Block a user