feat(Groq Chat Model Node): Add support for Groq chat models (#9250)

Signed-off-by: Oleg Ivaniv <me@olegivaniv.com>
Co-authored-by: Michael Kret <michael.k@radency.com>
This commit is contained in:
oleg
2024-04-30 09:37:30 +02:00
committed by GitHub
parent abae63574b
commit 96f02bd655
7 changed files with 276 additions and 3 deletions

View File

@@ -0,0 +1,41 @@
import type {
IAuthenticateGeneric,
ICredentialTestRequest,
ICredentialType,
INodeProperties,
} from 'n8n-workflow';
// Credential type for the Groq API. Stores a single API key and injects it
// as a Bearer token on every request made with this credential.
export class GroqApi implements ICredentialType {
	name = 'groqApi';
	displayName = 'Groq';
	// Resolved by n8n against https://docs.n8n.io/integrations/builtin/credentials/
	documentationUrl = 'groq';
	properties: INodeProperties[] = [
		{
			displayName: 'API Key',
			name: 'apiKey',
			type: 'string',
			// Masked input so the key is never displayed in the editor UI
			typeOptions: { password: true },
			required: true,
			default: '',
		},
	];
	// Generic auth: attach the stored key as an Authorization header.
	// The leading '=' marks the value as an n8n expression.
	authenticate: IAuthenticateGeneric = {
		type: 'generic',
		properties: {
			headers: {
				Authorization: '=Bearer {{$credentials.apiKey}}',
			},
		},
	};
	// Credential test: GET /models on Groq's OpenAI-compatible endpoint;
	// a successful response proves the key is valid.
	test: ICredentialTestRequest = {
		request: {
			baseURL: 'https://api.groq.com/openai/v1',
			url: '/models',
		},
	};
}

View File

@@ -72,6 +72,7 @@ function getInputs(
filter: {
nodes: [
'@n8n/n8n-nodes-langchain.lmChatAnthropic',
'@n8n/n8n-nodes-langchain.lmChatGroq',
'@n8n/n8n-nodes-langchain.lmChatOllama',
'@n8n/n8n-nodes-langchain.lmChatOpenAi',
'@n8n/n8n-nodes-langchain.lmChatGooglePalm',

View File

@@ -0,0 +1,151 @@
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
import {
NodeConnectionType,
type IExecuteFunctions,
type INodeType,
type INodeTypeDescription,
type SupplyData,
} from 'n8n-workflow';
import { ChatGroq } from '@langchain/groq';
import { logWrapper } from '../../../utils/logWrapper';
import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
/**
 * Groq Chat Model sub-node: supplies a LangChain `ChatGroq` instance to
 * downstream AI nodes (chains/agents) via the AiLanguageModel connection.
 */
export class LmChatGroq implements INodeType {
	description: INodeTypeDescription = {
		displayName: 'Groq Chat Model',
		// eslint-disable-next-line n8n-nodes-base/node-class-description-name-miscased
		name: 'lmChatGroq',
		icon: 'file:groq.svg',
		group: ['transform'],
		version: 1,
		description: 'Language Model Groq',
		defaults: {
			name: 'Groq Chat Model',
		},
		codex: {
			categories: ['AI'],
			subcategories: {
				AI: ['Language Models'],
			},
			resources: {
				primaryDocumentation: [
					{
						url: 'https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.lmchatgroq/',
					},
				],
			},
		},
		// Sub-node: has no main inputs, only emits a model on its output.
		// eslint-disable-next-line n8n-nodes-base/node-class-description-inputs-wrong-regular-node
		inputs: [],
		// eslint-disable-next-line n8n-nodes-base/node-class-description-outputs-wrong
		outputs: [NodeConnectionType.AiLanguageModel],
		outputNames: ['Model'],
		credentials: [
			{
				name: 'groqApi',
				required: true,
			},
		],
		requestDefaults: {
			baseURL: 'https://api.groq.com/openai/v1',
		},
		properties: [
			// FIX: the original passed AiChain twice; the hint should list the
			// distinct consumer node types (chains AND agents) this model feeds.
			getConnectionHintNoticeField([NodeConnectionType.AiChain, NodeConnectionType.AiAgent]),
			{
				displayName: 'Model',
				name: 'model',
				type: 'options',
				typeOptions: {
					loadOptions: {
						routing: {
							request: {
								method: 'GET',
								url: '/models',
							},
							output: {
								postReceive: [
									{
										// The list endpoint wraps results in a 'data' array
										type: 'rootProperty',
										properties: {
											property: 'data',
										},
									},
									{
										// Only offer active entries that are actual models
										type: 'filter',
										properties: {
											pass: '={{ $responseItem.active === true && $responseItem.object === "model" }}',
										},
									},
									{
										type: 'setKeyValue',
										properties: {
											name: '={{$responseItem.id}}',
											value: '={{$responseItem.id}}',
										},
									},
								],
							},
						},
					},
				},
				routing: {
					send: {
						type: 'body',
						property: 'model',
					},
				},
				description:
					'The model which will generate the completion. <a href="https://console.groq.com/docs/models">Learn more</a>.',
				default: 'llama3-8b-8192',
			},
			{
				displayName: 'Options',
				name: 'options',
				placeholder: 'Add Option',
				description: 'Additional options to add',
				type: 'collection',
				default: {},
				options: [
					{
						displayName: 'Maximum Number of Tokens',
						name: 'maxTokensToSample',
						default: 4096,
						description: 'The maximum number of tokens to generate in the completion',
						type: 'number',
					},
					{
						displayName: 'Sampling Temperature',
						name: 'temperature',
						default: 0.7,
						typeOptions: { maxValue: 1, minValue: 0, numberPrecision: 1 },
						description:
							'Controls randomness: Lowering results in less random completions. As the temperature approaches zero, the model will become deterministic and repetitive.',
						type: 'number',
					},
				],
			},
		],
	};

	/**
	 * Builds a ChatGroq client from the stored credential and node parameters
	 * and hands it to the consuming node, wrapped for n8n execution logging.
	 *
	 * @param itemIndex - Index of the input item the parameters are read for.
	 * @returns The wrapped model as the sub-node's supply data.
	 */
	async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
		const credentials = await this.getCredentials('groqApi');
		const modelName = this.getNodeParameter('model', itemIndex) as string;
		// Both options come from an optional collection (default {}), so each
		// may be absent — model the type accordingly instead of lying to tsc.
		const options = this.getNodeParameter('options', itemIndex, {}) as {
			maxTokensToSample?: number;
			temperature?: number;
		};

		const model = new ChatGroq({
			apiKey: credentials.apiKey as string,
			modelName,
			maxTokens: options.maxTokensToSample,
			temperature: options.temperature,
		});

		return {
			response: logWrapper(model, this),
		};
	}
}

View File

@@ -0,0 +1,20 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<svg
id="Layer_2"
viewBox="0 0 499.99999 499.99999"
version="1.1"
width="500"
height="500"
xml:space="preserve"
xmlns="http://www.w3.org/2000/svg"
xmlns:svg="http://www.w3.org/2000/svg"><defs
id="defs4" /><g
id="PAGES"><circle
style="fill:#f54f35;fill-opacity:1;stroke-width:1.13622"
id="path4"
cx="250"
cy="250"
r="250" /><path
d="M 250.53664,97.122994 C 192.71931,96.588638 145.48222,142.97075 144.94786,200.78808 c -0.53434,57.81733 45.84777,105.05442 103.6651,105.58877 h 36.33621 v -39.22174 h -34.41253 c -36.12248,0.4275 -65.7258,-28.53462 -66.15329,-64.65708 -0.42749,-36.12248 28.53463,-65.72581 64.65708,-66.1533 h 1.49621 c 36.12248,0 65.4052,29.28272 65.51207,65.4052 v 0 96.39783 0 c 0,35.80187 -29.17585,64.97773 -64.87083,65.40521 -17.09941,-0.10688 -33.45071,-7.05351 -45.52717,-19.12995 l -27.7865,27.78651 c 19.23681,19.3437 45.31339,30.35143 72.56556,30.67205 h 1.38933 c 57.06924,-0.85497 102.917,-47.13022 103.2376,-104.19945 V 199.29189 C 353.66739,142.43639 307.28527,97.122994 250.53664,97.122994 Z"
style="fill:#ffffff;stroke-width:0px"
id="path1-3" /></g></svg>

After

Width:  |  Height:  |  Size: 1.2 KiB

View File

@@ -32,6 +32,7 @@
"dist/credentials/AzureOpenAiApi.credentials.js",
"dist/credentials/CohereApi.credentials.js",
"dist/credentials/GooglePalmApi.credentials.js",
"dist/credentials/GroqApi.credentials.js",
"dist/credentials/HuggingFaceApi.credentials.js",
"dist/credentials/MotorheadApi.credentials.js",
"dist/credentials/MistralCloudApi.credentials.js",
@@ -70,6 +71,7 @@
"dist/nodes/llms/LmChatAwsBedrock/LmChatAwsBedrock.node.js",
"dist/nodes/llms/LmChatGooglePalm/LmChatGooglePalm.node.js",
"dist/nodes/llms/LmChatGoogleGemini/LmChatGoogleGemini.node.js",
"dist/nodes/llms/LmChatGroq/LmChatGroq.node.js",
"dist/nodes/llms/LmChatMistralCloud/LmChatMistralCloud.node.js",
"dist/nodes/llms/LMChatOllama/LmChatOllama.node.js",
"dist/nodes/llms/LMChatOpenAi/LmChatOpenAi.node.js",
@@ -140,6 +142,7 @@
"@langchain/community": "0.0.53",
"@langchain/core": "0.1.61",
"@langchain/google-genai": "^0.0.12",
"@langchain/groq": "^0.0.8",
"@langchain/mistralai": "0.0.19",
"@langchain/openai": "^0.0.28",
"@langchain/pinecone": "^0.0.4",

View File

@@ -27,7 +27,7 @@
}}
</n8n-tooltip>
</li>
<li v-if="(consumedTokensSum?.totalTokens ?? 0) > 0">
<li v-if="(consumedTokensSum?.totalTokens ?? 0) > 0" :class="$style.tokensUsage">
{{
$locale.baseText('runData.aiContentBlock.tokens', {
interpolate: {
@@ -197,4 +197,9 @@ const runMeta = computed(() => {
padding-left: var(--spacing-3xs);
}
}
.tokensUsage {
display: flex;
align-items: center;
gap: var(--spacing-3xs);
}
</style>

56
pnpm-lock.yaml generated
View File

@@ -252,6 +252,9 @@ importers:
'@langchain/google-genai':
specifier: ^0.0.12
version: 0.0.12
'@langchain/groq':
specifier: ^0.0.8
version: 0.0.8
'@langchain/mistralai':
specifier: 0.0.19
version: 0.0.19
@@ -5199,7 +5202,7 @@ packages:
dependencies:
lodash.camelcase: 4.3.0
long: 5.2.3
protobufjs: 7.2.4
protobufjs: 7.2.6
yargs: 17.7.2
dev: false
@@ -6324,6 +6327,20 @@ packages:
'@langchain/core': 0.1.61
dev: false
/@langchain/groq@0.0.8:
resolution: {integrity: sha512-xqbe35K+12fiYtC/uqkaTT4AXxqL5uvhCrHzc+nBoFkTwM6YfTFE1ch95RZ5G2JnK1U9pKAre/trUSzlU1/6Kg==}
engines: {node: '>=18'}
dependencies:
'@langchain/core': 0.1.61
'@langchain/openai': 0.0.28
groq-sdk: 0.3.2
zod: 3.22.4
zod-to-json-schema: 3.22.5(zod@3.22.4)
transitivePeerDependencies:
- encoding
- supports-color
dev: false
/@langchain/mistralai@0.0.19:
resolution: {integrity: sha512-Uin/jve1NCZLAFa9dpOKzE3Y2+uSnMJQX5ria9vO3lnTGRlvBwcMhyGDoTYdI+gnQgHH4ceBoIBzJDlVG+WVWw==}
engines: {node: '>=18'}
@@ -16077,6 +16094,23 @@ packages:
resolution: {integrity: sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag==}
dev: true
/groq-sdk@0.3.2:
resolution: {integrity: sha512-Xp1xOea7nqUcTMndpiA8VkjZ05jM/eUUeCILxhRF+c2etBz/myQwRcUrr5lpWc0euIt96AiBMa9aYa0Iqrh13g==}
dependencies:
'@types/node': 18.16.16
'@types/node-fetch': 2.6.4
abort-controller: 3.0.0
agentkeepalive: 4.2.1
digest-fetch: 1.3.0
form-data-encoder: 1.7.2
formdata-node: 4.4.1
node-fetch: 2.7.0(encoding@0.1.13)
web-streams-polyfill: 3.2.1
transitivePeerDependencies:
- encoding
- supports-color
dev: false
/gtoken@6.1.2:
resolution: {integrity: sha512-4ccGpzz7YAr7lxrT2neugmXQ3hP9ho2gcaityLVkiUecAiwiy60Ii8gRbZeOsXV19fYaRjgBSshs8kXw+NKCPQ==}
engines: {node: '>=12.0.0'}
@@ -21886,7 +21920,7 @@ packages:
resolution: {integrity: sha512-AwAuY4g9nxx0u52DnSMkqqgyLHaW/XaPLtaAo3y/ZCfeaQB/g4YDH4kb8Wc/mWzWvu0YjOznVnfn373MVZZrgw==}
engines: {node: '>=12.0.0'}
dependencies:
protobufjs: 7.2.4
protobufjs: 7.2.6
dev: false
/protobufjs-cli@1.1.1(protobufjs@7.2.4):
@@ -21927,6 +21961,24 @@ packages:
long: 5.2.3
dev: false
/protobufjs@7.2.6:
resolution: {integrity: sha512-dgJaEDDL6x8ASUZ1YqWciTRrdOuYNzoOf27oHNfdyvKqHr5i0FV7FSLU+aIeFjyFgVxrpTOtQUi0BLLBymZaBw==}
engines: {node: '>=12.0.0'}
dependencies:
'@protobufjs/aspromise': 1.1.2
'@protobufjs/base64': 1.1.2
'@protobufjs/codegen': 2.0.4
'@protobufjs/eventemitter': 1.1.0
'@protobufjs/fetch': 1.1.0
'@protobufjs/float': 1.0.2
'@protobufjs/inquire': 1.1.0
'@protobufjs/path': 1.1.2
'@protobufjs/pool': 1.1.0
'@protobufjs/utf8': 1.1.0
'@types/node': 18.16.16
long: 5.2.3
dev: false
/proxy-addr@2.0.7:
resolution: {integrity: sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg==}
engines: {node: '>= 0.10'}