mirror of https://github.com/ggerganov/llama.cpp.git
main : restore old EOS behavior in interactive mode
commit 8af1991e2a
parent dadbed99e6
@@ -634,6 +634,11 @@ int main(int argc, char ** argv) {
                 llama_grammar_accept_token(ctx, grammar, id);
             }
 
+            // replace end of text token with newline token when in interactive mode
+            if (id == llama_token_eos() && params.interactive && !params.instruct && !params.input_prefix_bos) {
+                id = llama_token_nl();
+            }
+
             last_n_tokens.erase(last_n_tokens.begin());
             last_n_tokens.push_back(id);
         }
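In effect, the change restores the old behavior: in plain interactive mode (no --instruct and no --input-prefix-bos), a sampled end-of-sequence token no longer influences generation as EOS; it is swapped for a newline token so the session hands control back to the user instead of stopping. Below is a minimal standalone sketch of just that token-swap step; the token id constants and the maybe_replace_eos helper are hypothetical stand-ins for illustration, not part of llama.cpp, where the real values come from llama_token_eos() and llama_token_nl().

#include <cstdio>

// Hypothetical stand-in token ids, for illustration only.
constexpr int TOKEN_EOS = 2;
constexpr int TOKEN_NL  = 13;

struct Params {
    bool interactive      = true;   // -i / --interactive
    bool instruct         = false;  // --instruct
    bool input_prefix_bos = false;  // --input-prefix-bos
};

// Mirrors the restored logic: in plain interactive mode an EOS token is
// replaced with a newline so generation yields back to the user.
int maybe_replace_eos(int id, const Params & params) {
    if (id == TOKEN_EOS && params.interactive && !params.instruct && !params.input_prefix_bos) {
        return TOKEN_NL;
    }
    return id;
}

int main() {
    Params params;
    printf("sampled EOS -> %d\n", maybe_replace_eos(TOKEN_EOS, params)); // prints 13 (newline)
    printf("sampled 42  -> %d\n", maybe_replace_eos(42, params));        // other tokens pass through unchanged
    return 0;
}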