fix: extra newline token in stream mode

liuzhenghua-jk 2024-06-06 11:58:29 +08:00
parent 20ef30a46b
commit 61aff1fd8e
1 changed file with 2 additions and 1 deletion

@@ -484,7 +484,8 @@ async def predict_stream(model_id, gen_params):
                 object="chat.completion.chunk"
             )
             yield "{}".format(chunk.model_dump_json(exclude_unset=True))
+        if not has_send_first_chunk and output.startswith("\n"):
+            output = output[1:]
         send_msg = delta_text if has_send_first_chunk else output
         has_send_first_chunk = True
         message = DeltaMessage(
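
For context, below is a minimal, self-contained sketch of the streaming logic this hunk patches. The generator name stream_chunks and the cumulative-outputs iteration are assumptions for illustration, not the repository's actual code; only the two-line newline check mirrors the committed change. The idea: some models emit a leading "\n" before the first real token, and because the first chunk is sent as the full output rather than delta_text, that newline would otherwise leak into the stream.

    # Hypothetical reconstruction of the loop in predict_stream; only the
    # marked fix corresponds to the committed change.
    def stream_chunks(outputs):
        """Yield per-chunk texts from cumulative decoded outputs, dropping
        the leading newline some models emit before the first token."""
        has_send_first_chunk = False
        sent_len = 0
        for output in outputs:              # cumulative text decoded so far
            delta_text = output[sent_len:]  # new text since the last chunk
            sent_len = len(output)
            # The fix: the first chunk is sent as the whole `output`, so a
            # model-emitted leading "\n" must be stripped before sending.
            if not has_send_first_chunk and output.startswith("\n"):
                output = output[1:]
            send_msg = delta_text if has_send_first_chunk else output
            has_send_first_chunk = True
            yield send_msg

With the fix, list(stream_chunks(["\nHello", "\nHello world"])) yields ["Hello", " world"] instead of ["\nHello", " world"]: subsequent chunks are unaffected because they are built from delta_text, which is sliced from the unstripped cumulative output.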