From 0571034b55b920db83ecc9bb563bcb03bf57c56a Mon Sep 17 00:00:00 2001
From: Daniel
Date: Fri, 13 Sep 2024 11:15:33 +0200
Subject: [PATCH] fixed bug in AsyncUnify streaming.

---
 unify/chat/clients/uni_llm.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/unify/chat/clients/uni_llm.py b/unify/chat/clients/uni_llm.py
index eb4dc22..546782b 100644
--- a/unify/chat/clients/uni_llm.py
+++ b/unify/chat/clients/uni_llm.py
@@ -613,7 +613,8 @@ async def _generate_stream(
                 self.set_provider(chunk.model.split("@")[-1])
                 if message_content_only:
                     yield chunk.choices[0].delta.content or ""
-                yield chunk
+                else:
+                    yield chunk
         except openai.APIStatusError as e:
             raise Exception(e.message)
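
Note on the fix: before this patch, when message_content_only was set, each streamed chunk was yielded twice, first as the delta text and then again as the raw chunk object, because the final `yield chunk` was unconditional. The added `else` makes the two yields mutually exclusive, so consumers receive exactly one item per chunk. The sketch below is a self-contained illustration of that before/after behaviour; it uses a toy FakeChunk type rather than the library's own classes, purely to show the control-flow difference.

    # Illustrative only: toy stand-ins, not unify's actual types or methods.
    import asyncio
    from dataclasses import dataclass


    @dataclass
    class FakeChunk:
        text: str


    async def buggy_stream(chunks, message_content_only=True):
        for chunk in chunks:
            if message_content_only:
                yield chunk.text or ""
            yield chunk  # always runs: duplicates every chunk when the flag is set


    async def fixed_stream(chunks, message_content_only=True):
        for chunk in chunks:
            if message_content_only:
                yield chunk.text or ""
            else:
                yield chunk  # raw chunk only when full objects were requested


    async def main():
        chunks = [FakeChunk("Hello"), FakeChunk(" world")]
        print([item async for item in buggy_stream(chunks)])  # text and chunks interleaved
        print([item async for item in fixed_stream(chunks)])  # ['Hello', ' world']


    asyncio.run(main())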