fix: extra newline token in stream mode
commit 8dd5ad6fa1
parent 61aff1fd8e
@@ -484,9 +484,7 @@ async def predict_stream(model_id, gen_params):
                     object="chat.completion.chunk"
                 )
                 yield "{}".format(chunk.model_dump_json(exclude_unset=True))
-            if not has_send_first_chunk and output.startswith("\n"):
-                output = output[1:]
-            send_msg = delta_text if has_send_first_chunk else output
+            send_msg = delta_text if has_send_first_chunk else output[1:] if output.startswith("\n") else output
             has_send_first_chunk = True
             message = DeltaMessage(
                 content=send_msg,
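The fix folds the in-place strip (`output = output[1:]`) into the conditional expression that builds `send_msg`, so `output` itself is never mutated. Below is a minimal sketch of the resulting behavior; the surrounding loop and the incremental `delta_text` computation are assumptions inferred from the variable names in the hunk, not the actual handler:

    # Sketch only: simulates the first-chunk newline handling from the diff.
    # `chunks` stands in for successive cumulative decoder outputs (assumed).
    def stream_messages(chunks):
        has_send_first_chunk = False
        previous = ""
        for output in chunks:
            # Assumed delta computation: each chunk extends the previous one.
            delta_text = output[len(previous):]
            previous = output
            # The fixed line, verbatim from the diff: strip the leading
            # newline only while the first chunk has not yet been sent.
            send_msg = delta_text if has_send_first_chunk else output[1:] if output.startswith("\n") else output
            has_send_first_chunk = True
            yield send_msg

    # The leading "\n" is dropped from the first chunk only.
    print(list(stream_messages(["\nHello", "\nHello, world"])))  # ['Hello', ', world']

Because `output` is left intact, later `delta_text` slices stay aligned with the decoder's running transcript, which is plausibly where the extra newline token in stream mode came from.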