diff --git a/.github/workflows/integration-test.yml b/.github/workflows/integration-test.yml
index a88d970fe..cb4ef8c37 100644
--- a/.github/workflows/integration-test.yml
+++ b/.github/workflows/integration-test.yml
@@ -34,7 +34,7 @@ jobs:
       # mono occupies port 8084 which conflicts with mgmt-backend
       - name: Stop mono service
         run: |
-          sudo kill -9 `sudo lsof -t -i:8084`
+          sudo lsof -t -i:8084 | xargs -r sudo kill -9
           sudo lsof -i -P -n | grep LISTEN
 
       - name: Maximize build space
diff --git a/pkg/component/ai/perplexity/v0/.compogen/bottom.mdx b/pkg/component/ai/perplexity/v0/.compogen/bottom.mdx
index a585e7749..21dca972e 100644
--- a/pkg/component/ai/perplexity/v0/.compogen/bottom.mdx
+++ b/pkg/component/ai/perplexity/v0/.compogen/bottom.mdx
@@ -15,7 +15,7 @@ component:
     task: TASK_CHAT
     input:
       data:
-        model: llama-3.1-sonar-small-128k-online
+        model: sonar
         messages:
           - content:
               - text: Be precise and concise.
@@ -25,7 +25,7 @@ component:
               - text: ${variable.prompt}
                 type: text
             role: user
-            name: chunhao
+            name: Miles
       parameter:
         max-tokens: 500
         temperature: 0.2
@@ -37,12 +37,10 @@ component:
         top-k: 0
         presence-penalty: 0
         frequency-penalty: 1
-    setup:
-      api-key: ${secret.perplexity}
 
 output:
   perplexity:
     title: Perplexity
     value: ${perplexity-0.output}
 
-```
\ No newline at end of file
+```
diff --git a/pkg/component/ai/perplexity/v0/README.mdx b/pkg/component/ai/perplexity/v0/README.mdx
index 3361639cc..edeb43169 100644
--- a/pkg/component/ai/perplexity/v0/README.mdx
+++ b/pkg/component/ai/perplexity/v0/README.mdx
@@ -74,7 +74,7 @@ Input data.
 | Field | Field ID | Type | Note |
 | :--- | :--- | :--- | :--- |
 | [Chat Messages](#chat-chat-messages) | `messages` | array | List of chat messages. |
-| Model Name | `model` | string | The model to be used for `TASK_CHAT`. <br/><details><summary><strong>Enum values</strong></summary><ul><li>`llama-3.1-sonar-small-128k-online`</li><li>`llama-3.1-sonar-large-128k-online`</li><li>`llama-3.1-sonar-huge-128k-online`</li></ul></details> |
+| Model Name | `model` | string | The model to be used for `TASK_CHAT`. <br/><details><summary><strong>Enum values</strong></summary><ul><li>`sonar`</li><li>`sonar-pro`</li><li>`llama-3.1-sonar-small-128k-online`</li><li>`llama-3.1-sonar-large-128k-online`</li><li>`llama-3.1-sonar-huge-128k-online`</li></ul></details> |
 
 <details>
 <summary> Chat Messages </summary>
@@ -204,7 +204,7 @@ component:
     task: TASK_CHAT
     input:
       data:
-        model: llama-3.1-sonar-small-128k-online
+        model: sonar
         messages:
           - content:
               - text: Be precise and concise.
@@ -214,7 +214,7 @@ component:
               - text: ${variable.prompt}
                 type: text
             role: user
-            name: chunhao
+            name: Miles
       parameter:
         max-tokens: 500
         temperature: 0.2
@@ -226,12 +226,10 @@ component:
         top-k: 0
         presence-penalty: 0
         frequency-penalty: 1
-    setup:
-      api-key: ${secret.perplexity}
 
 output:
   perplexity:
     title: Perplexity
     value: ${perplexity-0.output}
 
-```
\ No newline at end of file
+```
diff --git a/pkg/component/ai/perplexity/v0/config/tasks.yaml b/pkg/component/ai/perplexity/v0/config/tasks.yaml
index b4f9d447c..3f63fe577 100644
--- a/pkg/component/ai/perplexity/v0/config/tasks.yaml
+++ b/pkg/component/ai/perplexity/v0/config/tasks.yaml
@@ -16,11 +16,18 @@ TASK_CHAT:
            shortDescription: The model to be used.
            type: string
            enum:
+              - sonar
+              - sonar-pro
+
+              # Deprecated models, unavailable after 2025-02-22
               - llama-3.1-sonar-small-128k-online
               - llama-3.1-sonar-large-128k-online
               - llama-3.1-sonar-huge-128k-online
            instillCredentialMap:
              values:
+                - sonar
+                - sonar-pro
+
                 - llama-3.1-sonar-small-128k-online
                 - llama-3.1-sonar-large-128k-online
                 - llama-3.1-sonar-huge-128k-online