From 0ba47403f3f33e091cc78287ac08f05a334cd9bf Mon Sep 17 00:00:00 2001
From: sdp
Date: Mon, 31 Jul 2023 01:28:28 -0700
Subject: [PATCH] fix bug in chunk.json()

---
 openai_api.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/openai_api.py b/openai_api.py
index 7225562..37e1833 100644
--- a/openai_api.py
+++ b/openai_api.py
@@ -3,7 +3,7 @@
 # Usage: python openai_api.py
 # Visit http://localhost:8000/docs for documents.
 
-
+import json
 import time
 import torch
 import uvicorn
@@ -135,7 +135,7 @@ async def predict(query: str, history: List[List[str]], model_id: str):
         finish_reason=None
     )
     chunk = ChatCompletionResponse(model=model_id, choices=[choice_data], object="chat.completion.chunk")
-    yield "{}".format(chunk.json(exclude_unset=True, ensure_ascii=False))
+    yield json.dumps(chunk.model_dump(exclude_unset=True), ensure_ascii=False)
 
     current_length = 0
 
@@ -152,7 +152,7 @@ async def predict(query: str, history: List[List[str]], model_id: str):
             finish_reason=None
         )
         chunk = ChatCompletionResponse(model=model_id, choices=[choice_data], object="chat.completion.chunk")
-        yield "{}".format(chunk.json(exclude_unset=True, ensure_ascii=False))
+        yield json.dumps(chunk.model_dump(exclude_unset=True), ensure_ascii=False)
 
 
     choice_data = ChatCompletionResponseStreamChoice(
@@ -161,7 +161,7 @@ async def predict(query: str, history: List[List[str]], model_id: str):
         finish_reason="stop"
    )
     chunk = ChatCompletionResponse(model=model_id, choices=[choice_data], object="chat.completion.chunk")
-    yield "{}".format(chunk.json(exclude_unset=True, ensure_ascii=False))
+    yield json.dumps(chunk.model_dump(exclude_unset=True), ensure_ascii=False)
     yield '[DONE]'
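
Note: the bug this patch fixes comes from the pydantic v1 -> v2 migration. In v2, BaseModel.json() is a deprecated alias of model_dump_json() and no longer forwards extra keyword arguments such as ensure_ascii to json.dumps, so chunk.json(exclude_unset=True, ensure_ascii=False) raises a TypeError at runtime. Below is a minimal standalone sketch of the failure and the replacement, assuming pydantic>=2 is installed; Demo is a hypothetical model used only for illustration, not part of openai_api.py:

    import json
    from pydantic import BaseModel

    class Demo(BaseModel):
        content: str

    chunk = Demo(content="\u4f60\u597d")

    # Pydantic v1 accepted json.dumps kwargs here; under v2 this line
    # raises TypeError because dumps_kwargs are no longer supported:
    # chunk.json(exclude_unset=True, ensure_ascii=False)

    # Patched approach: dump the model to a dict, then serialize with the
    # stdlib so non-ASCII characters stay unescaped in the streamed output.
    print(json.dumps(chunk.model_dump(exclude_unset=True), ensure_ascii=False))

Serializing via json.dumps(model_dump(...)) keeps the exclude_unset behavior of the old call while restoring control over ensure_ascii, which matters here because streamed chat content is frequently non-ASCII.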