diff --git a/packages/components/credentials/CerebrasApi.credential.ts b/packages/components/credentials/CerebrasApi.credential.ts
new file mode 100644
index 00000000000..5a701536f0f
--- /dev/null
+++ b/packages/components/credentials/CerebrasApi.credential.ts
@@ -0,0 +1,25 @@
+import { INodeParams, INodeCredential } from '../src/Interface'
+
+class CerebrasAPIAuth implements INodeCredential {
+    label: string
+    name: string
+    version: number
+    description: string
+    inputs: INodeParams[]
+
+    constructor() {
+        this.label = 'Cerebras API Key'
+        this.name = 'cerebrasAIApi'
+        this.version = 1.0
+        this.inputs = [
+            {
+                label: 'Cerebras API Key',
+                name: 'cerebrasApiKey',
+                type: 'password',
+                description: 'API Key (cloud.cerebras.ai)'
+            }
+        ]
+    }
+}
+
+module.exports = { credClass: CerebrasAPIAuth }
diff --git a/packages/components/nodes/chatmodels/ChatCerebras/ChatCerebras.ts b/packages/components/nodes/chatmodels/ChatCerebras/ChatCerebras.ts
new file mode 100644
index 00000000000..40951b3a8b4
--- /dev/null
+++ b/packages/components/nodes/chatmodels/ChatCerebras/ChatCerebras.ts
@@ -0,0 +1,161 @@
+import { ChatOpenAI, OpenAIChatInput } from '@langchain/openai'
+import { BaseCache } from '@langchain/core/caches'
+import { BaseLLMParams } from '@langchain/core/language_models/llms'
+import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
+import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
+
+class ChatCerebras_ChatModels implements INode {
+    label: string
+    name: string
+    version: number
+    type: string
+    icon: string
+    category: string
+    description: string
+    baseClasses: string[]
+    credential: INodeParams
+    inputs: INodeParams[]
+
+    constructor() {
+        this.label = 'ChatCerebras'
+        this.name = 'chatCerebras'
+        this.version = 1.0
+        this.type = 'ChatCerebras'
+        this.icon = 'cerebras.png'
+        this.category = 'Chat Models'
+        this.description = 'Models available via Cerebras'
+        this.baseClasses = [this.type, ...getBaseClasses(ChatOpenAI)]
+        this.credential = {
+            label: 'Connect Credential',
+            name: 'credential',
+            type: 'credential',
+            credentialNames: ['cerebrasAIApi'],
+            optional: true
+        }
+        this.inputs = [
+            {
+                label: 'Cache',
+                name: 'cache',
+                type: 'BaseCache',
+                optional: true
+            },
+            {
+                label: 'Model Name',
+                name: 'modelName',
+                type: 'string',
+                placeholder: 'llama3.1-8b'
+            },
+            {
+                label: 'Temperature',
+                name: 'temperature',
+                type: 'number',
+                step: 0.1,
+                default: 0.9,
+                optional: true
+            },
+            {
+                label: 'Max Tokens',
+                name: 'maxTokens',
+                type: 'number',
+                step: 1,
+                optional: true,
+                additionalParams: true
+            },
+            {
+                label: 'Top Probability',
+                name: 'topP',
+                type: 'number',
+                step: 0.1,
+                optional: true,
+                additionalParams: true
+            },
+            {
+                label: 'Frequency Penalty',
+                name: 'frequencyPenalty',
+                type: 'number',
+                step: 0.1,
+                optional: true,
+                additionalParams: true
+            },
+            {
+                label: 'Presence Penalty',
+                name: 'presencePenalty',
+                type: 'number',
+                step: 0.1,
+                optional: true,
+                additionalParams: true
+            },
+            {
+                label: 'Timeout',
+                name: 'timeout',
+                type: 'number',
+                step: 1,
+                optional: true,
+                additionalParams: true
+            },
+            {
+                label: 'BasePath',
+                name: 'basepath',
+                type: 'string',
+                optional: true,
+                default: 'https://api.cerebras.ai/v1',
+                additionalParams: true
+            },
+            {
+                label: 'BaseOptions',
+                name: 'baseOptions',
+                type: 'json',
+                optional: true,
+                additionalParams: true
+            }
+        ]
+    }
+
+    async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
+        const temperature = nodeData.inputs?.temperature as string
+        const modelName = nodeData.inputs?.modelName as string
+        const maxTokens = nodeData.inputs?.maxTokens as string
+        const topP = nodeData.inputs?.topP as string
+        const frequencyPenalty = nodeData.inputs?.frequencyPenalty as string
+        const presencePenalty = nodeData.inputs?.presencePenalty as string
+        const timeout = nodeData.inputs?.timeout as string
+        const streaming = nodeData.inputs?.streaming as boolean
+        const basePath = nodeData.inputs?.basepath as string
+        const baseOptions = nodeData.inputs?.baseOptions
+        const cache = nodeData.inputs?.cache as BaseCache
+
+        const credentialData = await getCredentialData(nodeData.credential ?? '', options)
+        const cerebrasAIApiKey = getCredentialParam('cerebrasApiKey', credentialData, nodeData)
+
+        const obj: Partial<OpenAIChatInput> & BaseLLMParams & { cerebrasAIApiKey?: string } = {
+            temperature: parseFloat(temperature),
+            modelName,
+            openAIApiKey: cerebrasAIApiKey,
+            streaming: streaming ?? true
+        }
+
+        if (maxTokens) obj.maxTokens = parseInt(maxTokens, 10)
+        if (topP) obj.topP = parseFloat(topP)
+        if (frequencyPenalty) obj.frequencyPenalty = parseFloat(frequencyPenalty)
+        if (presencePenalty) obj.presencePenalty = parseFloat(presencePenalty)
+        if (timeout) obj.timeout = parseInt(timeout, 10)
+        if (cache) obj.cache = cache
+
+        let parsedBaseOptions: any | undefined = undefined
+
+        if (baseOptions) {
+            try {
+                parsedBaseOptions = typeof baseOptions === 'object' ? baseOptions : JSON.parse(baseOptions)
+            } catch (exception) {
+                throw new Error("Invalid JSON in the ChatCerebras's BaseOptions: " + exception)
+            }
+        }
+        const model = new ChatOpenAI(obj, {
+            basePath,
+            baseOptions: parsedBaseOptions
+        })
+        return model
+    }
+}
+
+module.exports = { nodeClass: ChatCerebras_ChatModels }
diff --git a/packages/components/nodes/chatmodels/ChatCerebras/cerebras.png b/packages/components/nodes/chatmodels/ChatCerebras/cerebras.png
new file mode 100644
index 00000000000..0b12258f1cc
Binary files /dev/null and b/packages/components/nodes/chatmodels/ChatCerebras/cerebras.png differ
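
Since ChatCerebras delegates to ChatOpenAI pointed at Cerebras' OpenAI-compatible endpoint, the standalone sketch below mirrors what init() constructs with the node's defaults. It is illustrative only: the model name is the node's placeholder, the temperature and BasePath are the node defaults, and the CEREBRAS_API_KEY environment variable stands in for the cerebrasApiKey stored in the cerebrasAIApi credential.

import { ChatOpenAI } from '@langchain/openai'

async function main() {
    // Mirror of what ChatCerebras_ChatModels.init() builds with default inputs.
    const model = new ChatOpenAI(
        {
            modelName: 'llama3.1-8b', // placeholder of the Model Name input
            temperature: 0.9, // node default
            openAIApiKey: process.env.CEREBRAS_API_KEY, // Cerebras key reuses the OpenAI key field
            streaming: true
        },
        {
            basePath: 'https://api.cerebras.ai/v1' // default BasePath input (OpenAI-compatible API)
        }
    )

    const res = await model.invoke('Say hello from Cerebras.')
    console.log(res.content)
}

main().catch(console.error)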