fix: extra newline token in stream mode

liuzhenghua-jk 2024-06-06 12:07:26 +08:00
parent 61aff1fd8e
commit 8dd5ad6fa1
1 changed file with 1 addition and 3 deletions


@@ -484,9 +484,7 @@ async def predict_stream(model_id, gen_params):
                     object="chat.completion.chunk"
                 )
                 yield "{}".format(chunk.model_dump_json(exclude_unset=True))
-            if not has_send_first_chunk and output.startswith("\n"):
-                output = output[1:]
-            send_msg = delta_text if has_send_first_chunk else output
+            send_msg = delta_text if has_send_first_chunk else output[1:] if output.startswith("\n") else output
             has_send_first_chunk = True
             message = DeltaMessage(
                 content=send_msg,
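
For context, here is a minimal runnable sketch of the new one-liner. The names `output` (the text generated so far), `delta_text` (the per-chunk delta), and `has_send_first_chunk` come from the diff itself; the wrapper function `first_chunk_message` is hypothetical, added only for illustration.

    # Python conditional expressions are right-associative, so the one-liner parses as:
    #   delta_text if has_send_first_chunk
    #       else (output[1:] if output.startswith("\n") else output)
    def first_chunk_message(output: str, delta_text: str, has_send_first_chunk: bool) -> str:
        return delta_text if has_send_first_chunk else output[1:] if output.startswith("\n") else output

    # First chunk with a leading newline: the extra newline token is dropped.
    assert first_chunk_message("\nHello", "\nHello", False) == "Hello"
    # First chunk without one: passed through unchanged.
    assert first_chunk_message("Hello", "Hello", False) == "Hello"
    # Every later chunk: the per-chunk delta is sent as-is.
    assert first_chunk_message("\nHello world", " world", True) == " world"

Unlike the three removed lines, the new expression never reassigns `output` in place, so the untrimmed text presumably remains available to any later use of `output` in the same loop iteration.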