From 97c21e5667437a706b5aaeb3f6600890003d63c1 Mon Sep 17 00:00:00 2001
From: oobabooga <112222186+oobabooga@users.noreply.github.com>
Date: Mon, 6 Nov 2023 19:09:41 -0800
Subject: [PATCH] Don't strip leading spaces in OpenAI API

---
 extensions/openai/completions.py | 20 --------------------
 1 file changed, 20 deletions(-)

diff --git a/extensions/openai/completions.py b/extensions/openai/completions.py
index 3346148e..f01282f2 100644
--- a/extensions/openai/completions.py
+++ b/extensions/openai/completions.py
@@ -287,13 +287,7 @@ def chat_completions_common(body: dict, is_legacy: bool = False, stream=False) -
                 continue
 
             seen_content = answer
-
-            # strip extra leading space off new generated content
-            if len_seen == 0 and new_content[0] == ' ':
-                new_content = new_content[1:]
-
             chunk = chat_streaming_chunk(new_content)
-
             yield chunk
 
         completion_token_count = len(encode(answer)[0])
@@ -390,10 +384,6 @@ def completions_common(body: dict, is_legacy: bool = False, stream=False):
         for a in generator:
             answer = a
 
-        # strip extra leading space off new generated content
-        if answer and answer[0] == ' ':
-            answer = answer[1:]
-
         completion_token_count = len(encode(answer)[0])
         total_completion_token_count += completion_token_count
         stop_reason = "stop"
@@ -474,19 +464,9 @@ def completions_common(body: dict, is_legacy: bool = False, stream=False):
                 continue
 
             seen_content = answer
-
-            # strip extra leading space off new generated content
-            if len_seen == 0 and new_content[0] == ' ':
-                new_content = new_content[1:]
-
             chunk = text_streaming_chunk(new_content)
-
             yield chunk
 
-        # to get the correct count, we strip the leading space if present
-        if answer and answer[0] == ' ':
-            answer = answer[1:]
-
         completion_token_count = len(encode(answer)[0])
         stop_reason = "stop"
         if token_count + completion_token_count >= generate_params['truncation_length'] or completion_token_count >= max_tokens: