diff --git a/extensions/engine-management-extension/resources/openai.json b/extensions/engine-management-extension/resources/openai.json
index bda24bc384..97effd42a6 100644
--- a/extensions/engine-management-extension/resources/openai.json
+++ b/extensions/engine-management-extension/resources/openai.json
@@ -10,7 +10,7 @@
   "transform_req": {
     "chat_completions": {
       "url": "https://api.openai.com/v1/chat/completions",
-      "template": "{ {% set first = true %}{% for key, value in input_request %}{% if key == \"model\" or key == \"temperature\" or key == \"store\" or key == \"messages\" or key == \"stream\" or key == \"presence_penalty\" or key == \"metadata\" or key == \"frequency_penalty\" or key == \"tools\" or key == \"tool_choice\" or key == \"logprobs\" or key == \"top_logprobs\" or key == \"logit_bias\" or key == \"n\" or key == \"modalities\" or key == \"prediction\" or key == \"response_format\" or key == \"service_tier\" or key == \"seed\" or key == \"stream_options\" or key == \"top_p\" or key == \"parallel_tool_calls\" or key == \"user\" or (not \"o1\" in input_request.model and (key == \"max_tokens\" or key == \"stop\")) %} {% if key == \"max_tokens\" and \"o1\" in input_request.model %} \"max_completion_tokens\": {{ tojson(value) }} {% else %} {% if not first %},{% endif %} \"{{ key }}\": {{ tojson(value) }} {% set first = false %} {% endif %} {% endif %} {% endfor %} }"
+      "template": "{ {% set first = true %} {% for key, value in input_request %} {% if key == \"model\" or key == \"temperature\" or key == \"store\" or key == \"messages\" or key == \"stream\" or key == \"presence_penalty\" or key == \"metadata\" or key == \"frequency_penalty\" or key == \"tools\" or key == \"tool_choice\" or key == \"logprobs\" or key == \"top_logprobs\" or key == \"logit_bias\" or key == \"n\" or key == \"modalities\" or key == \"prediction\" or key == \"response_format\" or key == \"service_tier\" or key == \"seed\" or key == \"stream_options\" or key == \"top_p\" or key == \"parallel_tool_calls\" or key == \"user\" or key == \"max_tokens\" or ((input_request.model == \"o1\" or input_request.model == \"o1-preview\" or input_request.model == \"o1-mini\") and (key == \"stop\")) %} {% if not first %} , {% endif %} {% if key == \"messages\" and (input_request.model == \"o1\" or input_request.model == \"o1-preview\" or input_request.model == \"o1-mini\") and input_request.messages.0.role == \"system\" %} \"messages\": [{% for message in input_request.messages %} {% if not loop.is_first %} { \"role\": \"{{ message.role }}\", \"content\": \"{{ message.content }}\" } {% if not loop.is_last %} , {% endif %} {% endif %} {% endfor %}] {% else if key == \"max_tokens\" and (input_request.model == \"o1\" or input_request.model == \"o1-preview\" or input_request.model == \"o1-mini\") %} \"max_completion_tokens\": {{ tojson(value) }} {% else %} \"{{ key }}\": {{ tojson(value) }} {% set first = false %} {% endif %} {% endif %} {% endfor %} }"
     }
   },
   "transform_resp": {
diff --git a/web/screens/Settings/Engines/index.tsx b/web/screens/Settings/Engines/index.tsx
index 8a797f46fc..ded8b5a909 100644
--- a/web/screens/Settings/Engines/index.tsx
+++ b/web/screens/Settings/Engines/index.tsx
@@ -23,7 +23,11 @@ const Engines = () => {
         {engines &&
           Object.entries(engines).map(([key]) => {
-            if (!isLocalEngine(engines, key as InferenceEngine)) return
+            if (
+              !isLocalEngine(engines, key as InferenceEngine) ||
+              !engines[key as InferenceEngine].length
+            )
+              return
             return (
             )
@@ -40,7 +44,11 @@ const Engines = () => {
         {engines &&
           Object.entries(engines).map(([key, values]) => {
-            if (isLocalEngine(engines, key as InferenceEngine)) return
+            if (
+              isLocalEngine(engines, key as InferenceEngine) ||
+              !values.length
+            )
+              return
             return (
         {engines &&
           Object.entries(engines)
             .filter(
-              ([key]) => !showSettingActiveLocalEngine.includes(key)
+              ([key]) =>
+                !showSettingActiveLocalEngine.includes(key) &&
+                engines[key as InferenceEngine].length > 0
             )
             .map(([key]) => {
               if (!isLocalEngine(engines, key as InferenceEngine)) return
@@ -119,7 +121,9 @@ const SettingLeftPanel = () => {
         {engines &&
           Object.entries(engines)
             .filter(
-              ([key]) => !showSettingActiveRemoteEngine.includes(key)
+              ([key]) =>
+                !showSettingActiveRemoteEngine.includes(key) &&
+                engines[key as InferenceEngine].length > 0
             )
             .map(([key]) => {
               if (isLocalEngine(engines, key as InferenceEngine)) return
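
Note (not part of the diff): the new chat_completions template above is dense, so here is a minimal TypeScript sketch of the request rewrite it appears intended to perform for the o1-family models: only whitelisted keys are forwarded, a leading system message is dropped from messages, and max_tokens is renamed to max_completion_tokens. The names ChatMessage, ChatRequest, transformForO1 and the abridged ALLOWED_KEYS list are hypothetical illustrations, not code from the extension.

// Illustrative sketch only, under the assumptions stated above.
type ChatMessage = { role: string; content: string }
type ChatRequest = { model: string; messages: ChatMessage[]; [key: string]: unknown }

const O1_MODELS = ['o1', 'o1-preview', 'o1-mini']
// Abridged stand-in for the template's key whitelist.
const ALLOWED_KEYS = ['model', 'messages', 'temperature', 'stream', 'top_p', 'max_tokens']

function transformForO1(req: ChatRequest): Record<string, unknown> {
  const out: Record<string, unknown> = {}
  const isO1 = O1_MODELS.includes(req.model)
  for (const [key, value] of Object.entries(req)) {
    if (!ALLOWED_KEYS.includes(key)) continue
    if (isO1 && key === 'messages') {
      // The template drops a leading system message for o1-family models.
      const messages = value as ChatMessage[]
      out.messages = messages[0]?.role === 'system' ? messages.slice(1) : messages
    } else if (isO1 && key === 'max_tokens') {
      // The template sends max_completion_tokens instead of max_tokens for o1-family models.
      out.max_completion_tokens = value
    } else {
      out[key] = value
    }
  }
  return out
}

// Example: { model: 'o1-mini', messages: [system, user], max_tokens: 256 }
// becomes  { model: 'o1-mini', messages: [user], max_completion_tokens: 256 }.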