fix: extra newline token in stream mode
parent 20ef30a46b
commit 61aff1fd8e
@@ -484,7 +484,8 @@ async def predict_stream(model_id, gen_params):
                 object="chat.completion.chunk"
             )
             yield "{}".format(chunk.model_dump_json(exclude_unset=True))
-
+            if not has_send_first_chunk and output.startswith("\n"):
+                output = output[1:]
             send_msg = delta_text if has_send_first_chunk else output
             has_send_first_chunk = True
             message = DeltaMessage(
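For context, a minimal, self-contained sketch of the delta logic this hunk sits in. It assumes the generator yields cumulative decoded text (as generate_stream_glm3 does upstream of this hunk); stream_deltas and the sample chunks are hypothetical names for illustration. Unlike the patch, the sketch strips the leading newline from the outgoing message rather than mutating output, which keeps later delta offsets aligned with the cumulative string.

# Minimal sketch (not the repository's code): reproduces the first-chunk
# newline handling this commit adds. `stream_deltas` and the sample data
# below are hypothetical; real chunks come from generate_stream_glm3.

def stream_deltas(cumulative_texts):
    """Yield per-chunk delta strings from cumulative decoded text,
    dropping the spurious leading "\n" from the first chunk."""
    output = ""
    has_send_first_chunk = False
    for decoded in cumulative_texts:
        delta_text = decoded[len(output):]  # text added since last chunk
        output = decoded
        if has_send_first_chunk:
            send_msg = delta_text
        else:
            # First chunk: strip the extra newline token emitted in stream
            # mode (the bug this commit fixes). Stripping the outgoing
            # message, not `output`, keeps later delta offsets aligned.
            send_msg = output[1:] if output.startswith("\n") else output
            has_send_first_chunk = True
        yield send_msg


# Example: the model's first token is "\n"; it is not forwarded.
print(list(stream_deltas(["\n", "\nHello", "\nHello world"])))
# -> ['', 'Hello', ' world']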