From 250fe4143f15c7fe7b0d4e55e3ea14f3ed811261 Mon Sep 17 00:00:00 2001
From: pandyamarut
Date: Wed, 6 Nov 2024 15:47:42 -0500
Subject: [PATCH] Fix non-streaming response

Signed-off-by: pandyamarut
---
 src/handler.py | 12 +++++++++---
 1 file changed, 9 insertions(+), 3 deletions(-)

diff --git a/src/handler.py b/src/handler.py
index 6a1df61..393e6e2 100644
--- a/src/handler.py
+++ b/src/handler.py
@@ -21,10 +21,16 @@ async def async_handler(job):
         openai_url = f"{engine.base_url}" + openai_route
         headers = {"Content-Type": "application/json"}
 
-        response = requests.post(openai_url, headers=headers, json=openai_input, stream=True)
+        response = requests.post(openai_url, headers=headers, json=openai_input)
         # Process the streamed response
-        for formated_chunk in process_response(response):
-            yield formated_chunk
+        if openai_input.get("stream", False):
+            for formated_chunk in process_response(response):
+                yield formated_chunk
+        else:
+            for chunk in response.iter_lines():
+                if chunk:
+                    decoded_chunk = chunk.decode('utf-8')
+                    yield decoded_chunk
         else:
             generate_url = f"{engine.base_url}/generate"
             headers = {"Content-Type": "application/json"}