From ae2f6ba5a4c239896f0c4ca1c6c179c5265a1236 Mon Sep 17 00:00:00 2001
From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Date: Wed, 21 Aug 2024 12:14:58 +0000
Subject: [PATCH] [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci
---
 comps/cores/mega/orchestrator.py | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/comps/cores/mega/orchestrator.py b/comps/cores/mega/orchestrator.py
index 101452a51..8f0f20f2f 100644
--- a/comps/cores/mega/orchestrator.py
+++ b/comps/cores/mega/orchestrator.py
@@ -135,15 +135,15 @@ async def execute(
         if (
             self.services[cur_node].service_type == ServiceType.LLM
             or self.services[cur_node].service_type == ServiceType.LVM
-        ): 
+        ):
             if llm_parameters.streaming:
                 llm_parameters_dict = llm_parameters.dict()
-                
+
                 for field, value in llm_parameters_dict.items():
                     if inputs.get(field) != value:
                         inputs[field] = value
                 # Still leave to sync requests.post for StreamingResponse
-                
+
                 response = requests.post(
                     url=endpoint, data=json.dumps(inputs), proxies={"http": None}, stream=True, timeout=1000
                 )
@@ -153,7 +153,7 @@
                 cur_node = downstream[0]
                 hitted_ends = [".", "?", "!", "。", ",", "!"]
                 downstream_endpoint = self.services[downstream[0]].endpoint_path
-                
+
                 def generate():
                     if response:
                         buffered_chunk_str = ""
@@ -178,7 +178,7 @@ def generate():
                                        yield from self.token_generator(res_txt, is_last=is_last)
                                else:
                                    yield chunk
-                
+
                 return StreamingResponse(generate(), media_type="text/event-stream"), cur_node
             else:
                 async with session.post(endpoint, json=inputs) as response: